Backup commit

My backspace key started ghosting. Nothing works atm.
2024-01-27 14:50:33 +00:00
parent f77e4fd90a
commit a8887227e5
236 changed files with 10946 additions and 8977 deletions

Cargo.lock (generated)

@@ -30,16 +30,15 @@ checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
[[package]] [[package]]
name = "anstream" name = "anstream"
version = "0.3.2" version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"anstyle-parse", "anstyle-parse",
"anstyle-query", "anstyle-query",
"anstyle-wincon", "anstyle-wincon",
"colorchoice", "colorchoice",
"is-terminal",
"utf8parse", "utf8parse",
] ]
@@ -69,14 +68,25 @@ dependencies = [
[[package]] [[package]]
name = "anstyle-wincon" name = "anstyle-wincon"
version = "1.0.1" version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"windows-sys", "windows-sys",
] ]
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi 0.3.9",
]
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.1.0" version = "1.1.0"
@@ -128,33 +138,31 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.3.4" version = "4.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80672091db20273a15cf9fdd4e47ed43b5091ec9841bf4c6145c9dfbbcae09ed" checksum = "ac495e00dcec98c83465d5ad66c5c4fabd652fd6686e7c6269b117e729a6f17b"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive", "clap_derive",
"once_cell",
] ]
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.3.4" version = "4.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1458a1df40e1e2afebb7ab60ce55c1fa8f431146205aa5f4887e0b111c27636" checksum = "c77ed9a32a62e6ca27175d00d29d05ca32e396ea1eb5fb01d8256b669cec7663"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
"bitflags 1.3.2",
"clap_lex", "clap_lex",
"strsim", "strsim",
] ]
[[package]] [[package]]
name = "clap_derive" name = "clap_derive"
version = "4.3.2" version = "4.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8cd2b2a819ad6eec39e8f1d6b53001af1e5469f8c177579cdaeb313115b825f" checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442"
dependencies = [ dependencies = [
"heck", "heck",
"proc-macro2", "proc-macro2",
@@ -164,9 +172,9 @@ dependencies = [
[[package]] [[package]]
name = "clap_lex" name = "clap_lex"
version = "0.5.0" version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1"
[[package]] [[package]]
name = "colorchoice" name = "colorchoice"
@@ -183,6 +191,26 @@ dependencies = [
"crossbeam-utils", "crossbeam-utils",
] ]
[[package]]
name = "const_format"
version = "0.2.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673"
dependencies = [
"const_format_proc_macros",
]
[[package]]
name = "const_format_proc_macros"
version = "0.2.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
]
[[package]] [[package]]
name = "cpufeatures" name = "cpufeatures"
version = "0.2.7" version = "0.2.7"
@@ -192,6 +220,30 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "crossbeam-deque"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
dependencies = [
"cfg-if",
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset",
"scopeguard",
]
[[package]] [[package]]
name = "crossbeam-utils" name = "crossbeam-utils"
version = "0.8.16" version = "0.8.16"
@@ -285,9 +337,9 @@ dependencies = [
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.14.0" version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
dependencies = [ dependencies = [
"ahash", "ahash",
"allocator-api2", "allocator-api2",
@@ -301,54 +353,54 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]] [[package]]
name = "hermit-abi" name = "hermit-abi"
version = "0.3.1" version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
[[package]]
name = "io-lifetimes"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220"
dependencies = [ dependencies = [
"hermit-abi",
"libc", "libc",
"windows-sys",
] ]
[[package]] [[package]]
name = "is-terminal" name = "intern-all"
version = "0.4.7" version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" checksum = "d79d55e732e243f6762e0fc7b245bfd9fa0e0246356ed6cfdba62d9c707e36c1"
dependencies = [ dependencies = [
"hermit-abi", "hashbrown",
"io-lifetimes", "lazy_static",
"rustix 0.37.19",
"windows-sys",
] ]
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.11.0" version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0"
dependencies = [ dependencies = [
"either", "either",
] ]
[[package]]
name = "kernel32-sys"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.148" version = "0.2.148"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b"
[[package]]
name = "linux-raw-sys"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ece97ea872ece730aed82664c424eb4c8291e1ff2480247ccf7409044bc6479f"
[[package]] [[package]]
name = "linux-raw-sys" name = "linux-raw-sys"
version = "0.4.7" version = "0.4.7"
@@ -370,6 +422,30 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
dependencies = [
"autocfg",
]
[[package]]
name = "memorize"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b505dbd3a88b64417e29469500c32af2b538ba5f703100761f657540a1c442d"
dependencies = [
"hashbrown",
]
[[package]]
name = "never"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c96aba5aa877601bb3f6dd6a63a969e1f82e60646e81e71b14496995e9853c91"
[[package]] [[package]]
name = "num-traits" name = "num-traits"
version = "0.2.15" version = "0.2.15"
@@ -380,24 +456,38 @@ dependencies = [
] ]
[[package]] [[package]]
name = "once_cell" name = "numtoa"
version = "1.18.0" version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" checksum = "b8f8bdf33df195859076e54ab11ee78a1b208382d3a26ec40d142ffc1ecc49ef"
[[package]]
name = "once_cell"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]] [[package]]
name = "orchidlang" name = "orchidlang"
version = "0.2.2" version = "0.2.2"
dependencies = [ dependencies = [
"clap", "clap",
"const_format",
"dyn-clone", "dyn-clone",
"hashbrown", "hashbrown",
"intern-all",
"itertools", "itertools",
"memorize",
"never",
"once_cell",
"ordered-float", "ordered-float",
"paste", "paste",
"polling", "polling",
"rayon",
"rust-embed", "rust-embed",
"substack",
"take_mut", "take_mut",
"termsize",
"trait-set", "trait-set",
"unicode-segmentation", "unicode-segmentation",
] ]
@@ -425,14 +515,14 @@ checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57"
[[package]] [[package]]
name = "polling" name = "polling"
version = "3.0.0" version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51348b98db9d4a18ada4fdf7ff5274666e7e6c5a50c42a7d77c5e5c0cb6b036b" checksum = "e53b6af1f60f36f8c2ac2aad5459d75a5a9b4be1e8cdd40264f315d78193e531"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"concurrent-queue", "concurrent-queue",
"pin-project-lite", "pin-project-lite",
"rustix 0.38.13", "rustix",
"tracing", "tracing",
"windows-sys", "windows-sys",
] ]
@@ -455,6 +545,41 @@ dependencies = [
"proc-macro2", "proc-macro2",
] ]
[[package]]
name = "rayon"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_termios"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20145670ba436b55d91fc92d25e71160fbfbdd57831631c8d7d36377a476f1cb"
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.7.3" version = "1.7.3"
@@ -507,20 +632,6 @@ dependencies = [
"walkdir", "walkdir",
] ]
[[package]]
name = "rustix"
version = "0.37.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d"
dependencies = [
"bitflags 1.3.2",
"errno",
"io-lifetimes",
"libc",
"linux-raw-sys 0.3.7",
"windows-sys",
]
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "0.38.13" version = "0.38.13"
@@ -530,7 +641,7 @@ dependencies = [
"bitflags 2.4.0", "bitflags 2.4.0",
"errno", "errno",
"libc", "libc",
"linux-raw-sys 0.4.7", "linux-raw-sys",
"windows-sys", "windows-sys",
] ]
@@ -543,6 +654,12 @@ dependencies = [
"winapi-util", "winapi-util",
] ]
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.160" version = "1.0.160"
@@ -566,6 +683,12 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "substack"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffccc3d80f0a489de67aa74ff31ab852abb973e1c6dacf3704889e00ca544e7f"
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.109" version = "1.0.109"
@@ -594,6 +717,31 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
[[package]]
name = "termion"
version = "1.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "077185e2eac69c3f8379a4298e1e07cd36beb962290d4a51199acf0fdc10607e"
dependencies = [
"libc",
"numtoa",
"redox_syscall",
"redox_termios",
]
[[package]]
name = "termsize"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e86d824a8e90f342ad3ef4bd51ef7119a9b681b0cc9f8ee7b2852f02ccd2517"
dependencies = [
"atty",
"kernel32-sys",
"libc",
"termion",
"winapi 0.2.8",
]
[[package]] [[package]]
name = "tracing" name = "tracing"
version = "0.1.37" version = "0.1.37"
@@ -640,6 +788,12 @@ version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]] [[package]]
name = "utf8parse" name = "utf8parse"
version = "0.2.1" version = "0.2.1"
@@ -662,6 +816,12 @@ dependencies = [
"winapi-util", "winapi-util",
] ]
[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
[[package]] [[package]]
name = "winapi" name = "winapi"
version = "0.3.9" version = "0.3.9"
@@ -672,6 +832,12 @@ dependencies = [
"winapi-x86_64-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu",
] ]
[[package]]
name = "winapi-build"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
[[package]] [[package]]
name = "winapi-i686-pc-windows-gnu" name = "winapi-i686-pc-windows-gnu"
version = "0.4.0" version = "0.4.0"
@@ -684,7 +850,7 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [ dependencies = [
"winapi", "winapi 0.3.9",
] ]
[[package]] [[package]]


@@ -24,12 +24,20 @@ doc = false
 [dependencies]
 hashbrown = "0.14"
 ordered-float = "4.1"
-itertools = "0.11"
+itertools = "0.12"
 dyn-clone = "1.0"
-clap = { version = "4.3", features = ["derive"] }
+clap = { version = "4.4", features = ["derive"] }
 trait-set = "0.3"
 paste = "1.0"
 rust-embed = { version = "8.0", features = ["include-exclude"] }
 take_mut = "0.2.2"
 unicode-segmentation = "1.10.1"
-polling = "3.0.0"
+polling = "3.3.0"
+never = "0.1.0"
+memorize = "2.0.0"
+substack = "1.1.0"
+rayon = "1.8.0"
+intern-all = "0.2.0"
+once_cell = "1.19.0"
+const_format = "0.2.32"
+termsize = "0.1.6"


@@ -1,7 +1,8 @@
import system::(io, fs, async)
import std::(to_string, to_uint, inspect)

-const folder_view := (path, next) => do{
+--[
+const folder_view_old := \path. do{
  cps println $ "Contents of " ++ fs::os_print path;
  cps entries = async::block_on $ fs::read_dir path;
  cps list::enumerate entries
@@ -13,13 +14,13 @@ const folder_view := (path, next) => do{
  cps choice = readln;
  if (choice == "..") then do {
    let parent_path = fs::pop_path path
-      |> option::unwrap
+      |> option::assume
      |> tuple::pick 0 2;
    next parent_path
  } else do {
    let t[subname, is_dir] = to_uint choice
      |> (list::get entries)
-      |> option::unwrap;
+      |> option::assume;
    let subpath = fs::join_paths path subname;
    if is_dir then next subpath
    else do {
@@ -30,8 +31,16 @@ const folder_view := (path, next) => do{
    }
  }
}
+]--
-const main := loop_over (path = fs::cwd) {
-  cps path = folder_view path;
+const folder_view := \path. do cps {
+  cps println $ "Contents of " ++ fs::os_print path;
+  cps entries = async::block_on $ fs::read_dir path;
+  let t[name, is_dir] = option::assume $ list::get entries 0;
+  cps println $ to_string name ++ " " ++ fs::os_print is_dir
+}
+const main := loop_over (path = fs::cwd) {
+  cps folder_view path;
}


@@ -1,4 +1,4 @@
-import std::to_string
+import std::conv::to_string

const fizz_buzz := n => (
  (recursive r (i=0) list::cons i $ r (i + 1))
@@ -10,7 +10,7 @@ const fizz_buzz := n => (
  )
  |> list::take n
  |> list::reduce ((l, r) => l ++ "\n" ++ r)
-  |> option::unwrap
+  |> option::assume
)
const main := fizz_buzz 100


@@ -1,7 +1,9 @@
import std::exit_status
+import std::conv

-const main := (
+const main2 := (
  println "Hello, world!"
  exit_status::success
)
-- main := "Hello, World!\n"
+const main := conv::to_string t[1, 2, 3]


@@ -1,4 +1,4 @@
-import std::to_string
+import std::conv::to_string

export const main := do{
  let foo = list::new[1, 2, 3, 4, 5, 6];
@@ -7,7 +7,7 @@ export const main := do{
    |> list::skip 2
    |> list::take 3
    |> list::reduce ((a, b) => a + b)
-    |> option::unwrap;
+    |> option::assume;
  cps println $ to_string sum;
  0
}


@@ -8,7 +8,7 @@ export const main := do{
"bar" = 4 "bar" = 4
]; ];
let num = map::get foo "bar" let num = map::get foo "bar"
|> option::unwrap; |> option::assume;
cps println $ to_string num; cps println $ to_string num;
0 0
} }


@@ -1,4 +1,4 @@
-import std::to_string
+import std::(conv, reflect)

const foo := t[option::some "world!", option::none]
@@ -18,4 +18,6 @@ const test2 := match bar {
  map::having ["is_alive" = true, "greeting" = foo] => foo
}
-const main := test2 ++ ", " ++ test1
+const tests := test2 ++ ", " ++ test1
+const main := conv::to_string bar


@@ -14,10 +14,10 @@ wrap_comments = true
overflow_delimited_expr = true
use_small_heuristics = "Max"
fn_single_line = true
where_single_line = true
# literals
hex_literal_case = "Lower"
format_strings = true
# delimiters
match_arm_blocks = false


@@ -0,0 +1,62 @@
use itertools::Itertools;
use orchidlang::facade::macro_runner::MacroRunner;
use orchidlang::libs::std::exit_status::ExitStatus;
use orchidlang::name::Sym;
use crate::cli::cmd_prompt;
/// A little utility to step through the resolution of a macro set
pub fn main(macro_runner: MacroRunner, sym: Sym) -> ExitStatus {
let outname = sym.iter().join("::");
let (mut code, location) = match macro_runner.consts.get(&sym) {
Some(rep) => (rep.value.clone(), rep.range.clone()),
None => {
let valid = macro_runner.consts.keys();
let valid_str = valid.map(|t| t.iter().join("::")).join("\n\t");
eprintln!("Symbol {outname} not found\nvalid symbols: \n\t{valid_str}\n");
return ExitStatus::Failure;
},
};
print!("Debugging macros in {outname} defined at {location}");
println!("\nInitial state: {code}");
// print_for_debug(&code);
let mut steps = macro_runner.step(sym).enumerate();
loop {
let (cmd, _) = cmd_prompt("\ncmd> ").unwrap();
match cmd.trim() {
"" | "n" | "next" => match steps.next() {
None => print!("Halted"),
Some((idx, c)) => {
code = c;
print!("Step {idx}: {code}");
},
},
"p" | "print" => {
let glossary = code.value.collect_names();
let gl_str = glossary.iter().map(|t| t.iter().join("::")).join(", ");
print!("code: {code}\nglossary: {gl_str}")
},
"d" | "dump" => print!("Rules: {}", macro_runner.repo),
"q" | "quit" => return ExitStatus::Success,
"complete" => {
match steps.last() {
Some((idx, c)) => print!("Step {idx}: {c}"),
None => print!("Already halted"),
}
return ExitStatus::Success;
},
"h" | "help" => print!(
"Available commands:
\t<blank>, n, next\t\ttake a step
\tp, print\t\tprint the current state
\td, dump\t\tprint the rule table
\tq, quit\t\texit
\th, help\t\tprint this text"
),
_ => {
print!("unrecognized command \"{}\", try \"help\"", cmd);
continue;
},
}
}
}

src/bin/features/mod.rs (new file)

@@ -0,0 +1,2 @@
pub mod macro_debug;
pub mod print_project;


@@ -0,0 +1,59 @@
use itertools::Itertools;
use orchidlang::pipeline::project::{ItemKind, ProjItem, ProjectMod};
use orchidlang::tree::{ModEntry, ModMember};
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct ProjPrintOpts {
pub width: u16,
pub hide_locations: bool,
}
fn indent(amount: u16) -> String { " ".repeat(amount.into()) }
pub fn print_proj_mod(
module: &ProjectMod,
lvl: u16,
opts: ProjPrintOpts,
) -> String {
let mut acc = String::new();
let tab = indent(lvl);
for (key, ModEntry { member, x }) in &module.entries {
let mut line_acc = String::new();
for c in &x.comments {
line_acc += &format!("{tab}, --[|{}|]--\n", c);
}
if x.exported {
line_acc += &format!("{tab}export ");
} else {
line_acc += &tab
}
match member {
ModMember::Sub(module) => {
line_acc += &format!("module {key} {{\n");
line_acc += &print_proj_mod(module, lvl + 1, opts);
line_acc += &format!("{tab}}}");
},
ModMember::Item(ProjItem { kind: ItemKind::None }) => {
line_acc += &format!("keyword {key}");
},
ModMember::Item(ProjItem { kind: ItemKind::Alias(tgt) }) => {
line_acc += &format!("alias {key} => {tgt}");
},
ModMember::Item(ProjItem { kind: ItemKind::Const(val) }) => {
line_acc += &format!("const {key} := {val}");
},
}
if !x.locations.is_empty() && !opts.hide_locations {
let locs = x.locations.iter().map(|l| l.to_string()).join(", ");
let line_len = line_acc.split('\n').last().unwrap().len();
match usize::from(opts.width).checked_sub(locs.len() + line_len + 4) {
Some(padding) => line_acc += &" ".repeat(padding),
None => line_acc += &format!("\n{tab} @ "),
}
line_acc += &locs;
}
line_acc += "\n";
acc += &line_acc
}
acc
}
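A minimal usage sketch for the printer above (not part of the commit): the helper name dump_tree is hypothetical, and the terminal-width detection with a 74-column fallback mirrors the ProjectTree subcommand added to the executor further down.

use orchidlang::pipeline::project::ProjectMod;

use crate::features::print_project::{print_proj_mod, ProjPrintOpts};

// Hypothetical helper: render a project module tree at the detected terminal
// width, falling back to 74 columns like the ProjectTree subcommand does.
fn dump_tree(root: &ProjectMod) {
  let width = termsize::get().map(|s| s.cols).unwrap_or(74);
  let opts = ProjPrintOpts { width, hide_locations: false };
  print!("{}", print_proj_mod(root, 0, opts));
}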


@@ -1,39 +1,81 @@
 mod cli;
+mod features;

 use std::fs::File;
 use std::io::BufReader;
-use std::path::PathBuf;
+use std::num::NonZeroUsize;
+use std::path::{Path, PathBuf};
 use std::process::ExitCode;
+use std::thread::available_parallelism;

-use clap::Parser;
+use clap::{Parser, Subcommand};
+use hashbrown::{HashMap, HashSet};
 use itertools::Itertools;
-use orchidlang::facade::{Environment, PreMacro};
-use orchidlang::systems::asynch::AsynchSystem;
-use orchidlang::systems::stl::{ExitStatus, StlConfig};
-use orchidlang::systems::{directfs, io, scheduler};
-use orchidlang::{ast, interpreted, interpreter, Interner, Sym, VName};
+use orchidlang::error::{ProjectError, ProjectErrorObj, ProjectResult};
+use orchidlang::facade::loader::Loader;
+use orchidlang::facade::macro_runner::MacroRunner;
+use orchidlang::facade::merge_trees::merge_trees;
+use orchidlang::facade::process::Process;
+use orchidlang::foreign::inert::Inert;
+use orchidlang::interpreter::context::Halt;
+use orchidlang::interpreter::nort;
+use orchidlang::libs::asynch::system::AsynchSystem;
+use orchidlang::libs::directfs::DirectFS;
+use orchidlang::libs::io::{IOService, Stream};
+use orchidlang::libs::scheduler::system::SeqScheduler;
+use orchidlang::libs::std::exit_status::ExitStatus;
+use orchidlang::libs::std::std_system::StdConfig;
+use orchidlang::location::{CodeGenInfo, CodeLocation};
+use orchidlang::name::Sym;
+use orchidlang::tree::{ModMemberRef, TreeTransforms};
+use rayon::prelude::ParallelIterator;
+use rayon::slice::ParallelSlice;

-use crate::cli::cmd_prompt;
+use crate::features::macro_debug;
+use crate::features::print_project::{print_proj_mod, ProjPrintOpts};

+#[derive(Subcommand, Debug)]
+enum Command {
+  /// Run unit tests, any constant annotated --[[ test ]]--
+  Test {
+    /// Specify an exact test to run
+    #[arg(long)]
+    only: Option<String>,
+    #[arg(long, short)]
+    threads: Option<usize>,
+    #[arg(long)]
+    system: Option<String>,
+  },
+  #[command(arg_required_else_help = true)]
+  MacroDebug {
+    symbol: String,
+  },
+  ListMacros,
+  ProjectTree {
+    #[arg(long, default_value_t = false)]
+    hide_locations: bool,
+    #[arg(long)]
+    width: Option<u16>,
+  },
+}

 /// Orchid interpreter
 #[derive(Parser, Debug)]
-#[command(author, version, about, long_about = None)]
+#[command(name = "Orchid Executor")]
+#[command(author = "Lawrence Bethlenfalvy <lbfalvy@protonmail.com>")]
+#[command(long_about = Some("Execute Orchid projects from the file system"))]
 struct Args {
   /// Folder containing main.orc or the manually specified entry module
   #[arg(short, long, default_value = ".")]
   pub dir: String,
-  /// Entrypoint for the interpreter
-  #[arg(short, long, default_value = "main::main")]
-  pub main: String,
+  /// Alternative entrypoint for the interpreter
+  #[arg(short, long)]
+  pub main: Option<String>,
   /// Maximum number of steps taken by the macro executor
   #[arg(long, default_value_t = 10_000)]
   pub macro_limit: usize,
-  /// Print the parsed ruleset and exit
-  #[arg(long)]
-  pub list_macros: bool,
-  /// Step through the macro execution process in the specified symbol
-  #[arg(long, default_value = "")]
-  pub macro_debug: String,
+  #[command(subcommand)]
+  pub command: Option<Command>,
 }

 impl Args {
   /// Validate the project directory and the
@@ -42,21 +84,25 @@ impl Args {
     if !dir_path.is_dir() {
       return Err(format!("{} is not a directory", dir_path.display()));
     }
-    let segs = self.main.split("::").collect::<Vec<_>>();
+    let segs = match &self.main {
+      Some(s) => s.split("::").collect::<Vec<_>>(),
+      None => match File::open("./main.orc") {
+        Ok(_) => return Ok(()),
+        Err(e) => return Err(format!("Cannot open './main.orc'\n{e}")),
+      },
+    };
     if segs.len() < 2 {
       return Err("Entry point too short".to_string());
-    }
+    };
-    let (pathsegs, _) = segs.split_at(segs.len() - 1);
+    let (_, pathsegs) = segs.split_last().unwrap();
     let mut possible_files = pathsegs.iter().scan(dir_path, |path, seg| {
       path.push(seg);
       Some(path.with_extension("orc"))
     });
     if possible_files.all(|p| File::open(p).is_err()) {
-      return Err(format!(
-        "{} not found in {}",
-        pathsegs.join("::"),
-        PathBuf::from(&self.dir).display()
-      ));
+      let out_path = pathsegs.join("::");
+      let pbuf = PathBuf::from(&self.dir);
+      return Err(format!("{out_path} not found in {}", pbuf.display()));
     }
     Ok(())
   }
@@ -64,121 +110,216 @@ impl Args {
pub fn chk_proj(&self) -> Result<(), String> { self.chk_dir_main() } pub fn chk_proj(&self) -> Result<(), String> { self.chk_dir_main() }
} }
#[must_use] macro_rules! unwrap_exit {
pub fn to_vname(data: &str, i: &Interner) -> VName { ($param:expr) => {
data.split("::").map(|s| i.i(s)).collect::<Vec<_>>() match $param {
} Ok(v) => v,
Err(e) => {
fn print_for_debug(e: &ast::Expr<Sym>) { eprintln!("{e}");
print!( return ExitCode::FAILURE;
"code: {}\nglossary: {}",
e,
(e.value.collect_names().into_iter())
.map(|t| t.iter().join("::"))
.join(", ")
)
}
/// A little utility to step through the resolution of a macro set
pub fn macro_debug(premacro: PreMacro, sym: Sym) -> ExitCode {
let (mut code, location) = (premacro.consts.get(&sym))
.unwrap_or_else(|| {
panic!(
"Symbol {} not found\nvalid symbols: \n\t{}\n",
sym.iter().join("::"),
(premacro.consts.keys()).map(|t| t.iter().join("::")).join("\n\t")
)
})
.clone();
println!(
"Debugging macros in {} defined at {}.
Initial state: ",
sym.iter().join("::"),
location
);
print_for_debug(&code);
let mut steps = premacro.step(sym).enumerate();
loop {
let (cmd, _) = cmd_prompt("\ncmd> ").unwrap();
match cmd.trim() {
"" | "n" | "next" =>
if let Some((idx, c)) = steps.next() {
code = c;
print!("Step {idx}: ");
print_for_debug(&code);
} else {
print!("Halted")
},
"p" | "print" => print_for_debug(&code),
"d" | "dump" => print!("Rules: {}", premacro.repo),
"q" | "quit" => return ExitCode::SUCCESS,
"complete" => {
if let Some((idx, c)) = steps.last() {
code = c;
print!("Step {idx}: ");
print_for_debug(&code);
} else {
print!("Already halted")
}
return ExitCode::SUCCESS;
},
"h" | "help" => print!(
"Available commands:
\t<blank>, n, next\t\ttake a step
\tp, print\t\tprint the current state
\td, dump\t\tprint the rule table
\tq, quit\t\texit
\th, help\t\tprint this text"
),
_ => {
print!("unrecognized command \"{}\", try \"help\"", cmd);
continue;
}, },
} }
};
}
pub fn with_std_proc<T>(
dir: &Path,
macro_limit: usize,
f: impl for<'a> FnOnce(Process<'a>) -> ProjectResult<T>,
) -> ProjectResult<T> {
with_std_env(|env| {
let mr = MacroRunner::new(&env.load_dir(dir.to_owned())?)?;
let source_syms = mr.run_macros(Some(macro_limit))?;
let consts = merge_trees(source_syms, env.systems())?;
let proc = Process::new(consts, env.handlers());
f(proc)
})
}
// TODO
pub fn run_test(proc: &mut Process, name: Sym) -> ProjectResult<()> { Ok(()) }
pub fn run_tests(
dir: &Path,
macro_limit: usize,
threads: Option<usize>,
tests: &[Sym],
) -> ProjectResult<()> {
with_std_proc(dir, macro_limit, |proc| proc.validate_refs())?;
let threads = threads
.or_else(|| available_parallelism().ok().map(NonZeroUsize::into))
.unwrap_or(1);
rayon::ThreadPoolBuilder::new().num_threads(threads).build_global().unwrap();
let batch_size = tests.len().div_ceil(threads);
let errors = tests
.par_chunks(batch_size)
.map(|tests| {
let res = with_std_proc(dir, macro_limit, |mut proc| {
let mut errors = HashMap::new();
for test in tests {
if let Err(e) = run_test(&mut proc, test.clone()) {
errors.insert(test.clone(), e);
}
}
Ok(errors)
});
res.expect("Tested earlier")
})
.reduce(HashMap::new, |l, r| l.into_iter().chain(r).collect());
if errors.is_empty() { Ok(()) } else { Err(TestsFailed(errors).pack()) }
}
pub struct TestsFailed(HashMap<Sym, ProjectErrorObj>);
impl ProjectError for TestsFailed {
const DESCRIPTION: &'static str = "Various tests failed";
fn message(&self) -> String {
format!(
"{} tests failed. Errors:\n{}",
self.0.len(),
self.0.iter().map(|(k, e)| format!("In {k}, {e}")).join("\n")
)
} }
} }
fn get_tree_tests(dir: &Path) -> ProjectResult<Vec<Sym>> {
with_std_env(|env| {
env.load_dir(dir.to_owned()).map(|tree| {
(tree.all_consts().into_iter())
.filter(|(_, rep)| rep.comments.iter().any(|s| s.trim() == "test"))
.map(|(k, _)| k.clone())
.collect::<Vec<_>>()
})
})
}
pub fn with_std_env<T>(cb: impl for<'a> FnOnce(Loader<'a>) -> T) -> T {
let mut asynch = AsynchSystem::new();
let scheduler = SeqScheduler::new(&mut asynch);
let std_streams = [
("stdin", Stream::Source(BufReader::new(Box::new(std::io::stdin())))),
("stdout", Stream::Sink(Box::new(std::io::stdout()))),
("stderr", Stream::Sink(Box::new(std::io::stderr()))),
];
let env = Loader::new()
.add_system(StdConfig { impure: true })
.add_system(asynch)
.add_system(scheduler.clone())
.add_system(IOService::new(scheduler.clone(), std_streams))
.add_system(DirectFS::new(scheduler));
cb(env)
}
pub fn main() -> ExitCode { pub fn main() -> ExitCode {
let args = Args::parse(); let args = Args::parse();
args.chk_proj().unwrap_or_else(|e| panic!("{e}")); unwrap_exit!(args.chk_proj());
let dir = PathBuf::try_from(args.dir).unwrap(); let dir = PathBuf::from(args.dir);
let i = Interner::new(); let main = args.main.map_or_else(
let main = to_vname(&args.main, &i); || Sym::literal("tree::main::main"),
let mut asynch = AsynchSystem::new(); |main| Sym::parse(&main).expect("--main cannot be empty"),
let scheduler = scheduler::SeqScheduler::new(&mut asynch); );
let std_streams = [
("stdin", io::Stream::Source(BufReader::new(Box::new(std::io::stdin())))), // subcommands
("stdout", io::Stream::Sink(Box::new(std::io::stdout()))), match args.command {
// ("stderr", io::Stream::Sink(Box::new(std::io::stderr()))), Some(Command::ListMacros) => with_std_env(|env| {
]; let tree = unwrap_exit!(env.load_main(dir, main));
let env = Environment::new(&i) let mr = unwrap_exit!(MacroRunner::new(&tree));
.add_system(StlConfig { impure: true }) println!("Parsed rules: {}", mr.repo);
.add_system(asynch) ExitCode::SUCCESS
.add_system(scheduler.clone()) }),
.add_system(io::Service::new(scheduler.clone(), std_streams)) Some(Command::ProjectTree { hide_locations, width }) => {
.add_system(directfs::DirectFS::new(scheduler)); let tree = unwrap_exit!(with_std_env(|env| env.load_main(dir, main)));
let premacro = env.load_dir(&dir, &main).unwrap(); let w = width.or_else(|| termsize::get().map(|s| s.cols)).unwrap_or(74);
if args.list_macros { let print_opts = ProjPrintOpts { width: w, hide_locations };
println!("Parsed rules: {}", premacro.repo); println!("Project tree: {}", print_proj_mod(&tree.0, 0, print_opts));
return ExitCode::SUCCESS;
}
if !args.macro_debug.is_empty() {
let sym = i.i(&to_vname(&args.macro_debug, &i));
return macro_debug(premacro, sym);
}
let mut proc = premacro.build_process(Some(args.macro_limit)).unwrap();
proc.validate_refs().unwrap();
let main = interpreted::Clause::Constant(i.i(&main)).wrap();
let ret = proc.run(main, None).unwrap();
let interpreter::Return { state, inert, .. } = ret;
drop(proc);
assert!(inert, "Gas is not used, only inert data should be yielded");
match state.clone().downcast::<ExitStatus>() {
Ok(ExitStatus::Success) => ExitCode::SUCCESS,
Ok(ExitStatus::Failure) => ExitCode::FAILURE,
Err(_) => {
println!("{}", state.expr().clause);
ExitCode::SUCCESS ExitCode::SUCCESS
}, },
Some(Command::MacroDebug { symbol }) => with_std_env(|env| {
let tree = unwrap_exit!(env.load_main(dir, main));
let symbol = Sym::parse(&symbol).expect("macro-debug needs an argument");
macro_debug::main(unwrap_exit!(MacroRunner::new(&tree)), symbol).code()
}),
Some(Command::Test { only: Some(_), threads: Some(_), .. }) => {
eprintln!(
"Each test case runs in a single thread.
--only and --threads cannot both be specified"
);
ExitCode::FAILURE
},
Some(Command::Test { only: Some(_), system: Some(_), .. }) => {
eprintln!(
"Conflicting test filters applied. --only runs a single test by
symbol name, while --system runs all tests in a system"
);
ExitCode::FAILURE
},
Some(Command::Test { only: None, threads, system: None }) => {
let tree_tests = unwrap_exit!(get_tree_tests(&dir));
unwrap_exit!(run_tests(&dir, args.macro_limit, threads, &tree_tests));
ExitCode::SUCCESS
},
Some(Command::Test { only: Some(symbol), threads: None, system: None }) => {
let symbol = Sym::parse(&symbol).expect("Test needs an argument");
unwrap_exit!(run_tests(&dir, args.macro_limit, Some(1), &[symbol]));
ExitCode::SUCCESS
},
Some(Command::Test { only: None, threads, system: Some(system) }) => {
let subtrees = unwrap_exit!(with_std_env(|env| {
match env.systems().find(|s| s.name == system) {
None => Err(format!("System {system} not found")),
Some(sys) => {
let mut paths = HashSet::new();
sys.code.search_all((), |path, node, ()| {
if matches!(node, ModMemberRef::Item(_)) {
let name = Sym::new(path.unreverse())
.expect("Empty path means global file");
paths.insert(name);
}
});
Ok(paths)
},
}
}));
let in_subtrees =
|sym: Sym| subtrees.iter().any(|sub| sym[..].starts_with(&sub[..]));
let tests = unwrap_exit!(with_std_env(|env| -> ProjectResult<_> {
let tree = env.load_main(dir.clone(), main.clone())?;
let mr = MacroRunner::new(&tree)?;
let src_consts = mr.run_macros(Some(args.macro_limit))?;
let consts = merge_trees(src_consts, env.systems())?;
let test_names = (consts.into_iter())
.filter(|(k, v)| {
in_subtrees(k.clone())
&& v.comments.iter().any(|c| c.trim() == "test")
})
.map(|p| p.0)
.collect_vec();
Ok(test_names)
}));
eprintln!("Running {} tests", tests.len());
unwrap_exit!(run_tests(&dir, args.macro_limit, threads, &tests));
eprintln!("All tests pass");
ExitCode::SUCCESS
},
None => with_std_env(|env| {
let tree = unwrap_exit!(env.load_main(dir, main.clone()));
let mr = unwrap_exit!(MacroRunner::new(&tree));
let src_consts = unwrap_exit!(mr.run_macros(Some(args.macro_limit)));
let consts = unwrap_exit!(merge_trees(src_consts, env.systems()));
let mut proc = Process::new(consts, env.handlers());
unwrap_exit!(proc.validate_refs());
let main = nort::Clause::Constant(main.clone())
.to_expr(CodeLocation::Gen(CodeGenInfo::no_details("entrypoint")));
let ret = unwrap_exit!(proc.run(main, None));
let Halt { state, inert, .. } = ret;
drop(proc);
assert!(inert, "Gas is not used, only inert data should be yielded");
match state.clone().downcast() {
Ok(Inert(ExitStatus::Success)) => ExitCode::SUCCESS,
Ok(Inert(ExitStatus::Failure)) => ExitCode::FAILURE,
Err(_) => {
println!("{}", state.clause);
ExitCode::SUCCESS
},
}
}),
} }
} }
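Because the flattened side-by-side rendering above is hard to follow, here is a condensed sketch of what the new right-hand column does on the default path (no subcommand). It uses only the calls visible in the diff; the function name run_default and the plain unwrap() error handling are illustrative, since the actual binary builds the environment via with_std_env and routes errors through unwrap_exit!.

use std::path::PathBuf;

use orchidlang::facade::loader::Loader;
use orchidlang::facade::macro_runner::MacroRunner;
use orchidlang::facade::merge_trees::merge_trees;
use orchidlang::facade::process::Process;
use orchidlang::interpreter::nort;
use orchidlang::location::{CodeGenInfo, CodeLocation};
use orchidlang::name::Sym;

// Sketch of the default execution path: load the project, expand macros,
// merge system constants, then interpret the entry constant until it is inert.
fn run_default(env: Loader, dir: PathBuf, main: Sym, macro_limit: usize) {
  let tree = env.load_main(dir, main.clone()).unwrap();
  let mr = MacroRunner::new(&tree).unwrap();
  let src_consts = mr.run_macros(Some(macro_limit)).unwrap();
  let consts = merge_trees(src_consts, env.systems()).unwrap();
  let mut proc = Process::new(consts, env.handlers());
  proc.validate_refs().unwrap();
  // The entrypoint is just a constant reference wrapped in a generated location.
  let entry = nort::Clause::Constant(main)
    .to_expr(CodeLocation::Gen(CodeGenInfo::no_details("entrypoint")));
  let halt = proc.run(entry, None).unwrap();
  assert!(halt.inert, "Gas is not used, only inert data should be yielded");
  // The real main() additionally downcasts the result to Inert<ExitStatus>.
  println!("{}", halt.state.clause);
}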

src/error.rs (new file)

@@ -0,0 +1,185 @@
//! Abstractions for handling various code-related errors under a common trait
//! object.
use core::fmt;
use std::any::Any;
use std::fmt::{Debug, Display};
use std::sync::Arc;
use dyn_clone::{clone_box, DynClone};
use crate::location::CodeLocation;
use crate::utils::boxed_iter::{box_once, BoxedIter};
#[allow(unused)] // for doc
use crate::virt_fs::CodeNotFound;
/// A point of interest in resolving the error, such as the point where
/// processing got stuck, a command that is likely to be incorrect
pub struct ErrorPosition {
/// The suspected location
pub location: CodeLocation,
/// Any information about the role of this location
pub message: Option<String>,
}
impl From<CodeLocation> for ErrorPosition {
fn from(location: CodeLocation) -> Self { Self { location, message: None } }
}
/// Errors addressed to the developer which are to be resolved with
/// code changes
pub trait ProjectError: Sized + Send + Sync + 'static {
/// A general description of this type of error
const DESCRIPTION: &'static str;
/// A formatted message that includes specific parameters
#[must_use]
fn message(&self) -> String { self.description().to_string() }
/// Code positions relevant to this error. If you don't implement this, you
/// must implement [ProjectError::one_position]
#[must_use]
fn positions(&self) -> impl IntoIterator<Item = ErrorPosition> {
box_once(ErrorPosition { location: self.one_position(), message: None })
}
/// Short way to provide a single location. If you don't implement this, you
/// must implement [ProjectError::positions]
#[must_use]
fn one_position(&self) -> CodeLocation { unimplemented!() }
/// Convert the error into an `Arc<dyn DynProjectError>` to be able to
/// handle various errors together
#[must_use]
fn pack(self) -> ProjectErrorObj { Arc::new(self) }
}
/// Object-safe version of [ProjectError]. Implement that instead of this.
pub trait DynProjectError: Send + Sync {
/// Access type information about this error
#[must_use]
fn as_any(&self) -> &dyn Any;
/// A general description of this type of error
#[must_use]
fn description(&self) -> &str;
/// A formatted message that includes specific parameters
#[must_use]
fn message(&self) -> String { self.description().to_string() }
/// Code positions relevant to this error.
#[must_use]
fn positions(&self) -> BoxedIter<ErrorPosition>;
}
impl<T> DynProjectError for T
where T: ProjectError
{
fn as_any(&self) -> &dyn Any { self }
fn description(&self) -> &str { T::DESCRIPTION }
fn message(&self) -> String { ProjectError::message(self) }
fn positions(&self) -> BoxedIter<ErrorPosition> {
Box::new(ProjectError::positions(self).into_iter())
}
}
impl Display for dyn DynProjectError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let description = self.description();
let message = self.message();
let positions = self.positions().collect::<Vec<_>>();
writeln!(f, "Project error: {description}\n{message}")?;
if positions.is_empty() {
writeln!(f, "No locations specified")?;
} else {
for ErrorPosition { location, message } in positions {
match message {
None => writeln!(f, "@{location}"),
Some(msg) => writeln!(f, "@{location}: {msg}"),
}?
}
}
Ok(())
}
}
impl Debug for dyn DynProjectError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{self}")
}
}
/// Type-erased [ProjectError] implementor through the [DynProjectError]
/// object-trait
pub type ProjectErrorObj = Arc<dyn DynProjectError>;
/// Alias for a result with an error of [Arc] of [DynProjectError] trait object.
/// This is the type of result most commonly returned by pre-run operations.
pub type ProjectResult<T> = Result<T, ProjectErrorObj>;
/// A trait for error types that are only missing a location. Do not depend on
/// this trait, refer to [DynErrorSansLocation] instead.
pub trait ErrorSansLocation: Clone + Sized + Send + Sync + 'static {
/// General description of the error condition
const DESCRIPTION: &'static str;
/// Specific description of the error including code fragments or concrete
/// data if possible
fn message(&self) -> String { Self::DESCRIPTION.to_string() }
/// Convert the error to a type-erased structure for handling on shared
/// channels
fn pack(self) -> ErrorSansLocationObj { Box::new(self) }
}
/// Object-safe equivalent to [ErrorSansLocation]. Implement that one instead of
/// this. Typically found as [ErrorSansLocationObj]
pub trait DynErrorSansLocation: Any + Send + Sync + DynClone {
/// Allow to downcast the base object to distinguish between various errors.
/// The main intended purpose is to trigger a fallback when [CodeNotFound] is
/// encountered, but the possibilities are not limited to that.
fn as_any_ref(&self) -> &dyn Any;
/// Generic description of the error condition
fn description(&self) -> &str;
/// Specific description of this particular error
fn message(&self) -> String;
}
/// Type-erased [ErrorSansLocation] implementor through the object-trait
/// [DynErrorSansLocation]. This can be turned into a [ProjectErrorObj] with
/// [bundle_location].
pub type ErrorSansLocationObj = Box<dyn DynErrorSansLocation>;
/// A generic project result without location
pub type ResultSansLocation<T> = Result<T, ErrorSansLocationObj>;
impl<T: ErrorSansLocation + 'static> DynErrorSansLocation for T {
fn description(&self) -> &str { Self::DESCRIPTION }
fn message(&self) -> String { self.message() }
fn as_any_ref(&self) -> &dyn Any { self }
}
impl Clone for ErrorSansLocationObj {
fn clone(&self) -> Self { clone_box(&**self) }
}
impl DynErrorSansLocation for ErrorSansLocationObj {
fn description(&self) -> &str { (**self).description() }
fn message(&self) -> String { (**self).message() }
fn as_any_ref(&self) -> &dyn Any { (**self).as_any_ref() }
}
impl Display for ErrorSansLocationObj {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "{}\nLocation missing from error", self.message())
}
}
impl Debug for ErrorSansLocationObj {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{self}")
}
}
struct LocationBundle(CodeLocation, Box<dyn DynErrorSansLocation>);
impl DynProjectError for LocationBundle {
fn as_any(&self) -> &dyn Any { self.1.as_any_ref() }
fn description(&self) -> &str { self.1.description() }
fn message(&self) -> String { self.1.message() }
fn positions(&self) -> BoxedIter<ErrorPosition> {
box_once(ErrorPosition { location: self.0.clone(), message: None })
}
}
/// Add a location to an [ErrorSansLocation]
pub fn bundle_location(
location: &CodeLocation,
details: &dyn DynErrorSansLocation,
) -> ProjectErrorObj {
Arc::new(LocationBundle(location.clone(), clone_box(details)))
}
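For illustration only (not part of the commit), this is roughly how a downstream crate would define and type-erase an error with the new trait: the error type and its message are made up, and the generated location mirrors how the executor builds its entrypoint location.

use orchidlang::error::{ProjectError, ProjectErrorObj};
use orchidlang::location::{CodeGenInfo, CodeLocation};

// Hypothetical error: the requested entrypoint constant does not exist.
struct MissingEntrypoint {
  name: String,
}
impl ProjectError for MissingEntrypoint {
  const DESCRIPTION: &'static str = "The requested entrypoint does not exist";
  fn message(&self) -> String { format!("{} is not defined", self.name) }
  // One generated position; real errors would point into source code instead.
  fn one_position(&self) -> CodeLocation {
    CodeLocation::Gen(CodeGenInfo::no_details("example"))
  }
}

// Type-erase it for uniform handling, like TestsFailed does in the executor.
fn missing(name: &str) -> ProjectErrorObj {
  MissingEntrypoint { name: name.to_string() }.pack()
}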


@@ -1,41 +0,0 @@
use std::fmt::Display;
use std::sync::Arc;
use crate::foreign::{ExternError, XfnResult};
use crate::Location;
/// Some expectation (usually about the argument types of a function) did not
/// hold.
#[derive(Clone)]
pub struct AssertionError {
location: Location,
message: &'static str,
}
impl AssertionError {
/// Construct, upcast and wrap in a Result that never succeeds for easy
/// short-circuiting
pub fn fail<T>(location: Location, message: &'static str) -> XfnResult<T> {
Err(Self::ext(location, message))
}
/// Construct and upcast to [ExternError]
pub fn ext(
location: Location,
message: &'static str,
) -> Arc<dyn ExternError> {
Self { location, message }.into_extern()
}
}
impl Display for AssertionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Error: expected {}", self.message)?;
if self.location != Location::Unknown {
write!(f, " at {}", self.location)?;
}
Ok(())
}
}
impl ExternError for AssertionError {}


@@ -1,31 +0,0 @@
use itertools::Itertools;
use super::{ErrorPosition, ProjectError};
use crate::utils::BoxedIter;
use crate::{Location, VName};
/// Error raised if the same name ends up assigned to more than one thing.
/// A name in Orchid has exactly one meaning, either a value or a module.
pub struct ConflictingRoles {
/// Name assigned to multiple things
pub name: VName,
/// Location of at least two occurrences
pub locations: Vec<Location>,
}
impl ProjectError for ConflictingRoles {
fn description(&self) -> &str {
"The same name is assigned multiple times to conflicting items"
}
fn message(&self) -> String {
format!(
"{} has multiple conflicting meanings",
self.name.iter().map(|t| t.as_str()).join("::")
)
}
fn positions(&self) -> BoxedIter<ErrorPosition> {
Box::new(
(self.locations.iter())
.map(|l| ErrorPosition { location: l.clone(), message: None }),
)
}
}


@@ -1,26 +0,0 @@
use std::sync::Arc;
use itertools::Itertools;
use super::ProjectError;
use crate::representations::location::Location;
use crate::VName;
/// Error produced for the statement `import *`
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ImportAll {
/// The file containing the offending import
pub offender_file: Arc<VName>,
/// The module containing the offending import
pub offender_mod: Arc<VName>,
}
impl ProjectError for ImportAll {
fn description(&self) -> &str { "a top-level glob import was used" }
fn message(&self) -> String {
format!("{} imports *", self.offender_mod.iter().join("::"))
}
fn one_position(&self) -> Location {
Location::File(self.offender_file.clone())
}
}


@@ -1,22 +0,0 @@
//! Various errors the pipeline can produce
mod assertion_error;
mod conflicting_roles;
mod import_all;
mod no_targets;
mod not_exported;
mod project_error;
mod runtime_error;
mod too_many_supers;
mod unexpected_directory;
mod visibility_mismatch;
pub use assertion_error::AssertionError;
pub use conflicting_roles::ConflictingRoles;
pub use import_all::ImportAll;
pub use no_targets::NoTargets;
pub use not_exported::NotExported;
pub use project_error::{ErrorPosition, ProjectError, ProjectResult};
pub use runtime_error::RuntimeError;
pub use too_many_supers::TooManySupers;
pub use unexpected_directory::UnexpectedDirectory;
pub use visibility_mismatch::VisibilityMismatch;


@@ -1,20 +0,0 @@
use super::{ErrorPosition, ProjectError};
#[allow(unused)] // for doc
use crate::parse_layer;
use crate::utils::boxed_iter::box_empty;
use crate::utils::BoxedIter;
/// Error produced when [parse_layer] is called without targets. This function
/// produces an error instead of returning a straightforward empty tree because
/// the edge case of no targets is often an error and should generally be
/// handled explicitly
#[derive(Debug)]
pub struct NoTargets;
impl ProjectError for NoTargets {
fn description(&self) -> &str {
"No targets were specified for layer parsing"
}
fn positions(&self) -> BoxedIter<ErrorPosition> { box_empty() }
}


@@ -1,45 +0,0 @@
use std::sync::Arc;
use super::{ErrorPosition, ProjectError};
use crate::representations::location::Location;
use crate::utils::BoxedIter;
use crate::{Interner, VName};
/// An import refers to a symbol which exists but is not exported.
#[derive(Debug)]
pub struct NotExported {
/// The containing file - files are always exported
pub file: VName,
/// The path leading to the unexported module
pub subpath: VName,
/// The offending file
pub referrer_file: VName,
/// The module containing the offending import
pub referrer_subpath: VName,
}
impl ProjectError for NotExported {
fn description(&self) -> &str {
"An import refers to a symbol that exists but isn't exported"
}
fn positions(&self) -> BoxedIter<ErrorPosition> {
Box::new(
[
ErrorPosition {
location: Location::File(Arc::new(self.file.clone())),
message: Some(format!(
"{} isn't exported",
Interner::extern_all(&self.subpath).join("::")
)),
},
ErrorPosition {
location: Location::File(Arc::new(self.referrer_file.clone())),
message: Some(format!(
"{} cannot see this symbol",
Interner::extern_all(&self.referrer_subpath).join("::")
)),
},
]
.into_iter(),
)
}
}


@@ -1,72 +0,0 @@
use std::fmt::{Debug, Display};
use std::rc::Rc;
use crate::representations::location::Location;
use crate::utils::boxed_iter::box_once;
use crate::utils::BoxedIter;
/// A point of interest in resolving the error, such as the point where
/// processing got stuck, a command that is likely to be incorrect
pub struct ErrorPosition {
/// The suspected location
pub location: Location,
/// Any information about the role of this location
pub message: Option<String>,
}
/// Errors addressed to the developer which are to be resolved with
/// code changes
pub trait ProjectError {
/// A general description of this type of error
#[must_use]
fn description(&self) -> &str;
/// A formatted message that includes specific parameters
#[must_use]
fn message(&self) -> String { self.description().to_string() }
/// Code positions relevant to this error. If you don't implement this, you
/// must implement [ProjectError::one_position]
#[must_use]
fn positions(&self) -> BoxedIter<ErrorPosition> {
box_once(ErrorPosition { location: self.one_position(), message: None })
}
/// Short way to provide a single location. If you don't implement this, you
/// must implement [ProjectError::positions]
#[must_use]
fn one_position(&self) -> Location { unimplemented!() }
/// Convert the error into an `Rc<dyn ProjectError>` to be able to
/// handle various errors together
#[must_use]
fn rc(self) -> Rc<dyn ProjectError>
where
Self: Sized + 'static,
{
Rc::new(self)
}
}
impl Display for dyn ProjectError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let description = self.description();
let message = self.message();
let positions = self.positions();
writeln!(f, "Project error: {description}\n{message}")?;
for ErrorPosition { location, message } in positions {
writeln!(
f,
"@{location}: {}",
message.unwrap_or("location of interest".to_string())
)?
}
Ok(())
}
}
impl Debug for dyn ProjectError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{self}")
}
}
/// Alias for a result with an error of [Rc] of [ProjectError] trait object.
/// This is the type of result most commonly returned by pre-run operations.
pub type ProjectResult<T> = Result<T, Rc<dyn ProjectError>>;


@@ -1,27 +0,0 @@
use super::ProjectError;
use crate::representations::location::Location;
use crate::{Interner, VName};
/// Error produced when an import path starts with more `super` segments
/// than the current module's absolute path
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct TooManySupers {
/// The offending import path
pub path: VName,
/// The faulty import statement
pub location: Location,
}
impl ProjectError for TooManySupers {
fn description(&self) -> &str {
"an import path starts with more `super` segments than the current \
module's absolute path"
}
fn message(&self) -> String {
format!(
"path {} contains too many `super` steps.",
Interner::extern_all(&self.path).join("::"),
)
}
fn one_position(&self) -> Location { self.location.clone() }
}


@@ -1,28 +0,0 @@
use std::sync::Arc;
use super::ProjectError;
use crate::{Interner, Location, VName};
/// Produced when a stage that deals specifically with code encounters
/// a path that refers to a directory
#[derive(Debug)]
pub struct UnexpectedDirectory {
/// Path to the offending collection
pub path: VName,
}
impl ProjectError for UnexpectedDirectory {
fn description(&self) -> &str {
"A stage that deals specifically with code encountered a path that refers \
to a directory"
}
fn one_position(&self) -> crate::Location {
Location::File(Arc::new(self.path.clone()))
}
fn message(&self) -> String {
format!(
"{} was expected to be a file but a directory was found",
Interner::extern_all(&self.path).join("/")
)
}
}


@@ -1,28 +0,0 @@
use std::sync::Arc;
use super::project_error::ProjectError;
use crate::representations::location::Location;
use crate::{Interner, VName};
/// Multiple occurences of the same namespace with different visibility
#[derive(Debug)]
pub struct VisibilityMismatch {
/// The namespace with ambiguous visibility
pub namespace: VName,
/// The file containing the namespace
pub file: VName,
}
impl ProjectError for VisibilityMismatch {
fn description(&self) -> &str {
"Some occurences of a namespace are exported but others are not"
}
fn message(&self) -> String {
format!(
"{} is opened multiple times with different visibilities",
Interner::extern_all(&self.namespace).join("::")
)
}
fn one_position(&self) -> Location {
Location::File(Arc::new(self.file.clone()))
}
}


@@ -1,106 +0,0 @@
use std::iter;
use std::path::Path;
use hashbrown::HashMap;
use super::system::{IntoSystem, System};
use super::PreMacro;
use crate::error::ProjectResult;
use crate::pipeline::file_loader;
use crate::sourcefile::FileEntry;
use crate::utils::never;
use crate::{
from_const_tree, parse_layer, vname_to_sym_tree, Interner, ProjectTree, Stok,
VName,
};
/// A compiled environment ready to load user code. It stores the list of
/// systems and combines with usercode to produce a [Process]
pub struct Environment<'a> {
/// [Interner] pseudo-global
pub i: &'a Interner,
systems: Vec<System<'a>>,
}
impl<'a> Environment<'a> {
/// Initialize a new environment
#[must_use]
pub fn new(i: &'a Interner) -> Self { Self { i, systems: Vec::new() } }
/// Register a new system in the environment
#[must_use]
pub fn add_system<'b: 'a>(mut self, is: impl IntoSystem<'b> + 'b) -> Self {
self.systems.push(Box::new(is).into_system(self.i));
self
}
/// Compile the environment from the set of systems and return it directly.
/// See [#load_dir]
pub fn compile(self) -> ProjectResult<CompiledEnv<'a>> {
let Self { i, systems, .. } = self;
let mut tree = from_const_tree(HashMap::new(), &[i.i("none")]);
for sys in systems.iter() {
let system_tree = from_const_tree(sys.constants.clone(), &sys.vname(i));
tree = ProjectTree(never::unwrap_always(tree.0.overlay(system_tree.0)));
}
let mut lexer_plugins = vec![];
let mut line_parsers = vec![];
let mut prelude = vec![];
for sys in systems.iter() {
lexer_plugins.extend(sys.lexer_plugins.iter().map(|b| &**b));
line_parsers.extend(sys.line_parsers.iter().map(|b| &**b));
if !sys.code.is_empty() {
tree = parse_layer(
sys.code.keys().map(|sym| &sym[..]),
&|k, referrer| sys.load_file(k, referrer),
&tree,
&prelude,
&lexer_plugins,
&line_parsers,
i,
)?;
}
prelude.extend_from_slice(&sys.prelude);
}
Ok(CompiledEnv { prelude, tree, systems })
}
/// Load a directory from the local file system as an Orchid project.
pub fn load_dir(
self,
dir: &Path,
target: &[Stok],
) -> ProjectResult<PreMacro<'a>> {
let i = self.i;
let CompiledEnv { prelude, systems, tree } = self.compile()?;
let file_cache = file_loader::mk_dir_cache(dir.to_path_buf());
let lexer_plugins = (systems.iter())
.flat_map(|s| s.lexer_plugins.iter().map(|b| &**b))
.collect::<Vec<_>>();
let line_parsers = (systems.iter())
.flat_map(|s| s.line_parsers.iter().map(|b| &**b))
.collect::<Vec<_>>();
let vname_tree = parse_layer(
iter::once(target),
&|path, _| file_cache.find(path),
&tree,
&prelude,
&lexer_plugins,
&line_parsers,
i,
)?;
let tree = vname_to_sym_tree(vname_tree, i);
PreMacro::new(tree, systems, i)
}
}
/// Compiled environment waiting for usercode. An intermediate step between
/// [Environment] and [Process]
pub struct CompiledEnv<'a> {
/// Namespace tree for pre-defined symbols with symbols at the leaves and
/// rules defined on the nodes
pub tree: ProjectTree<VName>,
/// Lines prepended to each usercode file
pub prelude: Vec<FileEntry>,
/// List of systems to source handlers for the interpreter
pub systems: Vec<System<'a>>,
}

src/facade/loader.rs (new file, 143 lines)

@@ -0,0 +1,143 @@
use std::path::{Path, PathBuf};
use std::{fs, iter};
use intern_all::{i, Tok};
use substack::Substack;
use super::system::{IntoSystem, System};
use crate::error::ProjectResult;
use crate::gen::tree::ConstTree;
use crate::interpreter::handler::HandlerTable;
use crate::location::{CodeGenInfo, CodeLocation};
use crate::name::{Sym, VPath};
use crate::pipeline::load_solution::{load_solution, SolutionContext};
use crate::pipeline::project::ProjectTree;
use crate::utils::combine::Combine;
use crate::utils::sequence::Sequence;
use crate::utils::unwrap_or::unwrap_or;
use crate::virt_fs::{DeclTree, DirNode, VirtFS};
/// A compiled environment ready to load user code. It stores the list of
/// systems and combines with usercode to produce a [Process]
pub struct Loader<'a> {
systems: Vec<System<'a>>,
}
impl<'a> Loader<'a> {
/// Initialize a new environment
#[must_use]
pub fn new() -> Self { Self { systems: Vec::new() } }
/// Retrieve the list of systems
pub fn systems(&self) -> impl Iterator<Item = &System<'a>> {
self.systems.iter()
}
/// Register a new system in the environment
#[must_use]
pub fn add_system<'b: 'a>(mut self, is: impl IntoSystem<'b> + 'b) -> Self {
self.systems.push(Box::new(is).into_system());
self
}
/// Extract the systems from the environment
pub fn into_systems(self) -> Vec<System<'a>> { self.systems }
/// Initialize an environment with a prepared list of systems
pub fn from_systems(sys: impl IntoIterator<Item = System<'a>>) -> Self {
Self { systems: sys.into_iter().collect() }
}
/// Combine the `constants` fields of all systems
pub fn constants(&self) -> ConstTree {
(self.systems())
.try_fold(ConstTree::tree::<&str>([]), |acc, sys| {
acc.combine(sys.constants.clone())
})
.expect("Conflicting const trees")
}
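/// Combine the handler tables of all systems into one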
pub fn handlers(self) -> HandlerTable<'a> {
(self.systems.into_iter())
.fold(HandlerTable::new(), |t, sys| t.combine(sys.handlers))
}
/// Assemble the lexer plugins, line parsers and preludes of every system into
/// the context required by [load_solution]
pub fn solution_ctx(&self) -> ProjectResult<SolutionContext> {
Ok(SolutionContext {
lexer_plugins: Sequence::new(|| {
self.systems().flat_map(|sys| &sys.lexer_plugins).map(|b| &**b)
}),
line_parsers: Sequence::new(|| {
self.systems().flat_map(|sys| &sys.line_parsers).map(|b| &**b)
}),
preludes: Sequence::new(|| self.systems().flat_map(|sys| &sys.prelude)),
})
}
/// Combine source code from all systems with the specified directory into a
/// common [VirtFS]
pub fn make_dir_tree(&self, dir: PathBuf) -> DeclTree {
let dir_node = DirNode::new(dir, ".orc").rc();
let base = DeclTree::tree([("tree", DeclTree::leaf(dir_node))]);
(self.systems().try_fold(base, |acc, sub| acc.combine(sub.code.clone())))
.expect("Conflicting system trees")
}
/// Load a directory from the local file system as an Orchid project.
/// File loading proceeds along import statements and ignores all files
/// not reachable from the specified file.
pub fn load_main(
&self,
dir: PathBuf,
target: Sym,
) -> ProjectResult<ProjectTree> {
let ctx = self.solution_ctx()?;
let tgt_loc =
CodeLocation::Gen(CodeGenInfo::no_details("facade::entrypoint"));
let root = self.make_dir_tree(dir.clone());
let targets = iter::once((target, tgt_loc));
let constants = self.constants().unwrap_mod();
load_solution(ctx, targets, &constants, &root)
}
/// Load every orchid file in a directory
pub fn load_dir(&self, dir: PathBuf) -> ProjectResult<ProjectTree> {
let ctx = self.solution_ctx()?;
let tgt_loc =
CodeLocation::Gen(CodeGenInfo::no_details("facade::entrypoint"));
let mut orc_files: Vec<VPath> = Vec::new();
find_all_orc_files(&dir, &mut orc_files, Substack::Bottom);
let root = self.make_dir_tree(dir.clone());
let constants = self.constants().unwrap_mod();
let targets = (orc_files.into_iter())
.map(|p| (p.as_suffix_of(i("tree")).to_sym(), tgt_loc.clone()));
load_solution(ctx, targets, &constants, &root)
}
}
impl<'a> Default for Loader<'a> {
fn default() -> Self { Self::new() }
}
fn find_all_orc_files(
path: &Path,
paths: &mut Vec<VPath>,
stack: Substack<'_, Tok<String>>,
) {
assert!(path.exists(), "find_all_orc_files encountered missing path");
if path.is_symlink() {
let path = unwrap_or!(fs::read_link(path).ok(); return);
find_all_orc_files(&path, paths, stack)
} else if path.is_file() {
if path.extension().and_then(|t| t.to_str()) == Some("orc") {
paths.push(VPath(stack.unreverse()))
}
} else if path.is_dir() {
let entries = unwrap_or!(path.read_dir().ok(); return);
for entry in entries.filter_map(Result::ok) {
let name = unwrap_or!(entry.file_name().into_string().ok(); return);
find_all_orc_files(&entry.path(), paths, stack.push(i(&name)))
}
}
}
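
To show how these pieces are meant to be driven, a hedged usage sketch follows (not part of the diff): `my_system` stands for any value implementing `IntoSystem`, and the directory path is arbitrary.

use std::path::PathBuf;

// Sketch only: parse every .orc file under ./project with one extra system.
fn load_everything(
  my_system: impl IntoSystem<'static> + 'static,
) -> ProjectResult<ProjectTree> {
  let loader = Loader::new().add_system(my_system);
  loader.load_dir(PathBuf::from("./project"))
}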


@@ -0,0 +1,84 @@
use std::iter;
use hashbrown::HashMap;
use crate::error::{ProjectError, ProjectResult};
use crate::location::CodeLocation;
use crate::name::Sym;
use crate::parse::parsed;
use crate::pipeline::project::{
ConstReport, ProjectTree,
};
use crate::rule::repository::Repo;
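/// Holds the optimized rule repository together with the constants it will be
/// applied to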
pub struct MacroRunner {
/// Optimized catalog of substitution rules
pub repo: Repo,
/// Runtime code containing macro invocations
pub consts: HashMap<Sym, ConstReport>,
}
impl MacroRunner {
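/// Build a runner from the rules and constants of a loaded project tree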
pub fn new(tree: &ProjectTree) -> ProjectResult<Self> {
let rules = tree.all_rules();
let repo = Repo::new(rules).map_err(|(rule, e)| e.to_project(&rule))?;
Ok(Self { repo, consts: tree.all_consts().into_iter().collect() })
}
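/// Apply the rules to every constant until none match, or fail if a constant
/// exceeds the optional step limit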
pub fn run_macros(
&self,
timeout: Option<usize>,
) -> ProjectResult<HashMap<Sym, ConstReport>> {
let mut symbols = HashMap::new();
for (name, report) in self.consts.iter() {
let value = match timeout {
None => (self.repo.pass(&report.value))
.unwrap_or_else(|| report.value.clone()),
Some(limit) => {
let (o, leftover_gas) = self.repo.long_step(&report.value, limit + 1);
match leftover_gas {
1.. => o,
_ => {
let err = MacroTimeout {
location: CodeLocation::Source(report.range.clone()),
symbol: name.clone(),
limit,
};
return Err(err.pack());
},
}
},
};
symbols.insert(name.clone(), ConstReport { value, ..report.clone() });
}
Ok(symbols)
}
/// Obtain an iterator that steps through the preprocessing of a constant
/// for debugging macros
pub fn step(&self, sym: Sym) -> impl Iterator<Item = parsed::Expr> + '_ {
let mut target =
self.consts.get(&sym).expect("Target not found").value.clone();
iter::from_fn(move || {
target = self.repo.step(&target)?;
Some(target.clone())
})
}
}
/// Error raised when a macro runs too long
#[derive(Debug)]
pub struct MacroTimeout {
location: CodeLocation,
symbol: Sym,
limit: usize,
}
impl ProjectError for MacroTimeout {
const DESCRIPTION: &'static str = "Macro execution has not halted";
fn message(&self) -> String {
let Self { symbol, limit, .. } = self;
format!("Macro processing in {symbol} took more than {limit} steps")
}
fn one_position(&self) -> CodeLocation { self.location.clone() }
}
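
A hedged sketch of how this type is driven (none of this is in the diff; `tree` and `sym` are assumed inputs and the step limit is arbitrary):

fn expand_all(tree: &ProjectTree, sym: Sym) -> ProjectResult<()> {
  let runner = MacroRunner::new(tree)?;
  // Give up on any constant that needs more than 10_000 rewrite steps.
  let _expanded = runner.run_macros(Some(10_000))?;
  // Debugging aid: inspect the first few rewrite steps of one constant.
  for (n, expr) in runner.step(sym).take(5).enumerate() {
    println!("step {n}: {expr:?}"); // assumes parsed::Expr implements Debug
  }
  Ok(())
}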

src/facade/merge_trees.rs (new file, 61 lines)

@@ -0,0 +1,61 @@
use std::sync::Arc;
use hashbrown::HashMap;
use never::Never;
use substack::Substack;
use super::system::System;
use crate::error::ProjectResult;
use crate::intermediate::ast_to_ir::ast_to_ir;
use crate::intermediate::ir_to_nort::ir_to_nort;
use crate::interpreter::nort;
use crate::location::{CodeGenInfo, CodeLocation};
use crate::name::{Sym, VPath};
use crate::pipeline::project::ConstReport;
use crate::tree::{ModMember, ModMemberRef, TreeTransforms};
/// Equivalent of [crate::pipeline::project::ConstReport] for the interpreter's
/// representation, [crate::interpreter::nort].
pub struct NortConst {
/// Comments associated with the constant which may affect its interpretation
pub comments: Vec<Arc<String>>,
/// Location of the definition, if known
pub location: CodeLocation,
/// Value assigned to the constant
pub value: nort::Expr,
}
/// Combine a list of symbols loaded from source and the constant trees from
/// each system.
pub fn merge_trees<'a: 'b, 'b>(
source: impl IntoIterator<Item = (Sym, ConstReport)> + 'b,
systems: impl IntoIterator<Item = &'b System<'a>> + 'b,
) -> ProjectResult<impl IntoIterator<Item = (Sym, NortConst)> + 'static> {
let mut out = HashMap::new();
for (name, rep) in source {
out.insert(name.clone(), NortConst {
value: ir_to_nort(&ast_to_ir(rep.value, name)?),
location: CodeLocation::Source(rep.range),
comments: rep.comments,
});
}
for sys in systems {
let const_module = sys.constants.unwrap_mod_ref();
const_module.search_all((), |path, node, ()| {
let m = if let ModMemberRef::Mod(m) = node { m } else { return };
for (key, ent) in &m.entries {
if let ModMember::Item(c) = &ent.member {
let path = VPath::new(path.unreverse()).as_prefix_of(key.clone());
let location = CodeLocation::Gen(CodeGenInfo::details(
"constant from",
format!("system.name={}", sys.name),
));
let value = c.gen_nort(location.clone());
let crep = NortConst { value, comments: vec![], location };
out.insert(path.to_sym(), crep);
}
}
});
}
Ok(out)
}
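
A short hedged sketch of the intended call site (assumed: `symbols` is the map returned by `MacroRunner::run_macros`, `loader` is the facade `Loader`, and `use super::loader::Loader;` is added to this file's imports):

fn lower_to_nort(
  symbols: HashMap<Sym, ConstReport>,
  loader: &Loader<'_>,
) -> ProjectResult<Vec<(Sym, NortConst)>> {
  // Pair source constants with the systems' native constants.
  let consts = merge_trees(symbols, loader.systems())?;
  Ok(consts.into_iter().collect())
}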


@@ -1,12 +1,8 @@
 //! A simplified set of commands each grouping a large subset of the operations
 //! exposed by Orchid to make writing embeddings faster in the typical case.
-mod environment;
-mod pre_macro;
-mod process;
-mod system;
-pub use environment::{CompiledEnv, Environment};
-pub use pre_macro::{MacroTimeout, PreMacro};
-pub use process::Process;
-pub use system::{IntoSystem, MissingSystemCode, System};
+pub mod loader;
+pub mod macro_runner;
+pub mod process;
+pub mod system;
+pub mod merge_trees;


@@ -1,98 +1,80 @@
use std::iter; use std::iter;
use std::sync::Arc;
use hashbrown::HashMap; use hashbrown::HashMap;
use never::Never;
use super::{Process, System}; use super::process::Process;
use crate::error::{ProjectError, ProjectResult}; use super::system::System;
use crate::interpreter::HandlerTable; use crate::error::{ErrorPosition, ProjectError, ProjectResult};
use crate::rule::Repo; use crate::intermediate::ast_to_ir::ast_to_ir;
use crate::{ use crate::intermediate::ir_to_nort::ir_to_nort;
ast, ast_to_interpreted, collect_consts, collect_rules, rule, Interner, use crate::interpreter::handler::HandlerTable;
Location, ProjectTree, Sym, use crate::location::{CodeGenInfo, CodeLocation};
use crate::name::{Sym, VPath};
use crate::parse::parsed;
use crate::pipeline::project::{
collect_consts, collect_rules, ConstReport, ProjectTree,
}; };
use crate::rule::repository::Repo;
use crate::tree::ModMember;
/// Everything needed for macro execution, and constructing the process /// Everything needed for macro execution, and constructing the process
pub struct PreMacro<'a> { pub struct PreMacro<'a> {
/// Optimized catalog of substitution rules /// Optimized catalog of substitution rules
pub repo: Repo, pub repo: Repo,
/// Runtime code containing macro invocations /// Runtime code containing macro invocations
pub consts: HashMap<Sym, (ast::Expr<Sym>, Location)>, pub consts: HashMap<Sym, (ConstReport, CodeLocation)>,
/// Libraries and plug-ins /// Libraries and plug-ins
pub systems: Vec<System<'a>>, pub systems: Vec<System<'a>>,
/// [Interner] pseudo-global
pub i: &'a Interner,
} }
impl<'a> PreMacro<'a> { impl<'a> PreMacro<'a> {
/// Build a [PreMacro] from a source tree and system list /// Build a [PreMacro] from a source tree and system list
pub fn new( pub fn new(
tree: ProjectTree<Sym>, tree: &ProjectTree,
systems: Vec<System<'a>>, systems: Vec<System<'a>>,
i: &'a Interner,
) -> ProjectResult<Self> { ) -> ProjectResult<Self> {
let consts = collect_consts(&tree, i);
let rules = collect_rules(&tree);
let repo = match rule::Repo::new(rules, i) {
Ok(r) => r,
Err((rule, error)) => {
return Err(error.to_project_error(&rule));
},
};
Ok(Self { Ok(Self {
repo, repo,
consts: (consts.into_iter()) consts: (consts.into_iter())
.map(|(name, expr)| { .map(|(name, expr)| {
// Figure out the location of the constant let (ent, _) = (tree.0)
let location = (name.split_last()) .walk1_ref(&[], &name.split_last().1[..], |_| true)
.and_then(|(_, path)| { .expect("path sourced from symbol names");
let origin = (tree.0) let location = (ent.x.locations.first().cloned())
.walk_ref(&[], path, false) .unwrap_or_else(|| CodeLocation::Source(expr.value.range.clone()));
.unwrap_or_else(|_| panic!("path sourced from symbol names"));
(origin.extra.file.as_ref()).cloned()
})
.map(|p| Location::File(Arc::new(p)))
.unwrap_or(Location::Unknown);
(name, (expr, location)) (name, (expr, location))
}) })
.collect(), .collect(),
i,
systems, systems,
}) })
} }
/// Run all macros to termination or the optional timeout. If a timeout does /// Run all macros to termination or the optional timeout. If a timeout does
/// not occur, returns a process which can execute Orchid code /// not occur, returns a process which can execute Orchid code
pub fn build_process( pub fn run_macros(
self, self,
timeout: Option<usize>, timeout: Option<usize>,
) -> ProjectResult<Process<'a>> { ) -> ProjectResult<Process<'a>> {
let Self { i, systems, repo, consts } = self; let Self { systems, repo, consts } = self;
let mut symbols = HashMap::new(); for sys in systems.iter() {
for (name, (source, source_location)) in consts.iter() { let const_module = sys.constants.unwrap_mod_ref();
let unmatched = if let Some(limit) = timeout { let _ = const_module.search_all((), &mut |path, module, ()| {
let (unmatched, steps_left) = repo.long_step(source, limit + 1); for (key, ent) in &module.entries {
if steps_left == 0 { if let ModMember::Item(c) = &ent.member {
return Err( let path = VPath::new(path.unreverse()).as_prefix_of(key.clone());
MacroTimeout { let cginfo = CodeGenInfo::details(
location: source_location.clone(), "constant from",
symbol: name.clone(), format!("system.name={}", sys.name),
limit, );
} symbols
.rc(), .insert(path.to_sym(), c.gen_nort(CodeLocation::Gen(cginfo)));
); }
} else {
unmatched
} }
} else { Ok::<(), Never>(())
repo.pass(source).unwrap_or_else(|| source.clone()) });
};
let runtree =
ast_to_interpreted(&unmatched, name.clone()).map_err(|e| e.rc())?;
symbols.insert(name.clone(), runtree);
} }
Ok(Process { Ok(Process {
symbols, symbols,
i,
handlers: (systems.into_iter()) handlers: (systems.into_iter())
.fold(HandlerTable::new(), |tbl, sys| tbl.combine(sys.handlers)), .fold(HandlerTable::new(), |tbl, sys| tbl.combine(sys.handlers)),
}) })
@@ -100,8 +82,9 @@ impl<'a> PreMacro<'a> {
/// Obtain an iterator that steps through the preprocessing of a constant /// Obtain an iterator that steps through the preprocessing of a constant
/// for debugging macros /// for debugging macros
pub fn step(&self, sym: Sym) -> impl Iterator<Item = ast::Expr<Sym>> + '_ { pub fn step(&self, sym: Sym) -> impl Iterator<Item = parsed::Expr> + '_ {
let mut target = self.consts.get(&sym).expect("Target not found").0.clone(); let mut target =
self.consts.get(&sym).expect("Target not found").0.value.clone();
iter::from_fn(move || { iter::from_fn(move || {
target = self.repo.step(&target)?; target = self.repo.step(&target)?;
Some(target.clone()) Some(target.clone())
@@ -112,20 +95,17 @@ impl<'a> PreMacro<'a> {
/// Error raised when a macro runs too long /// Error raised when a macro runs too long
#[derive(Debug)] #[derive(Debug)]
pub struct MacroTimeout { pub struct MacroTimeout {
location: Location, location: CodeLocation,
symbol: Sym, symbol: Sym,
limit: usize, limit: usize,
} }
impl ProjectError for MacroTimeout { impl ProjectError for MacroTimeout {
fn description(&self) -> &str { "Macro execution has not halted" } const DESCRIPTION: &'static str = "Macro execution has not halted";
fn message(&self) -> String { fn message(&self) -> String {
format!( let Self { symbol, limit, .. } = self;
"Macro execution during the processing of {} took more than {} steps", format!("Macro processing in {symbol} took more than {limit} steps")
self.symbol.extern_vec().join("::"),
self.limit
)
} }
fn one_position(&self) -> Location { self.location.clone() } fn one_position(&self) -> CodeLocation { self.location.clone() }
} }


@@ -1,23 +1,32 @@
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use super::merge_trees::NortConst;
use crate::error::{ErrorPosition, ProjectError, ProjectResult}; use crate::error::{ErrorPosition, ProjectError, ProjectResult};
use crate::interpreted::{self, ExprInst}; use crate::interpreter::context::{Halt, RunContext};
#[allow(unused)] // for doc use crate::interpreter::error::RunError;
use crate::interpreter; use crate::interpreter::handler::{run_handler, HandlerTable};
use crate::interpreter::{ use crate::interpreter::nort::{Clause, Expr};
run_handler, Context, HandlerTable, Return, RuntimeError, use crate::location::CodeLocation;
}; use crate::name::Sym;
use crate::{Interner, Location, Sym}; use crate::utils::boxed_iter::BoxedIter;
/// This struct ties the state of systems to loaded code, and allows to call /// This struct ties the state of systems to loaded code, and allows to call
/// Orchid-defined functions /// Orchid-defined functions
pub struct Process<'a> { pub struct Process<'a> {
pub(crate) symbols: HashMap<Sym, ExprInst>, pub(crate) symbols: HashMap<Sym, Expr>,
pub(crate) handlers: HandlerTable<'a>, pub(crate) handlers: HandlerTable<'a>,
pub(crate) i: &'a Interner,
} }
impl<'a> Process<'a> { impl<'a> Process<'a> {
/// Build a process from the return value of [crate::facade::merge_trees] and a
/// [HandlerTable] of command handlers
pub fn new(
consts: impl IntoIterator<Item = (Sym, NortConst)>,
handlers: HandlerTable<'a>,
) -> Self {
let symbols = consts.into_iter().map(|(k, v)| (k, v.value)).collect();
Self { handlers, symbols }
}
/// Execute the given command in this process. If gas is specified, at most as /// Execute the given command in this process. If gas is specified, at most as
/// many steps will be executed and then the partial result returned. /// many steps will be executed and then the partial result returned.
/// ///
@@ -25,24 +34,23 @@ impl<'a> Process<'a> {
/// yields /// yields
pub fn run( pub fn run(
&mut self, &mut self,
prompt: ExprInst, prompt: Expr,
gas: Option<usize>, gas: Option<usize>,
) -> Result<Return, RuntimeError> { ) -> Result<Halt, RunError> {
let ctx = Context { gas, interner: self.i, symbols: &self.symbols }; let ctx = RunContext { gas, symbols: &self.symbols };
run_handler(prompt, &mut self.handlers, ctx) run_handler(prompt, &mut self.handlers, ctx)
} }
/// Find all unbound constant names in a symbol. This is often useful to /// Find all unbound constant names in a symbol. This is often useful to
/// identify dynamic loading targets. /// identify dynamic loading targets.
#[must_use] #[must_use]
pub fn unbound_refs(&self, key: Sym) -> Vec<(Sym, Location)> { pub fn unbound_refs(&self, key: Sym) -> Vec<(Sym, CodeLocation)> {
let mut errors = Vec::new(); let mut errors = Vec::new();
let sym = self.symbols.get(&key).expect("symbol must exist"); let sym = self.symbols.get(&key).expect("symbol must exist");
sym.search_all(&mut |s: &ExprInst| { sym.search_all(&mut |s: &Expr| {
let expr = s.expr(); if let Clause::Constant(sym) = &*s.clause.cls() {
if let interpreted::Clause::Constant(sym) = &expr.clause {
if !self.symbols.contains_key(sym) { if !self.symbols.contains_key(sym) {
errors.push((sym.clone(), expr.location.clone())) errors.push((sym.clone(), s.location()))
} }
} }
None::<()> None::<()>
@@ -51,8 +59,8 @@ impl<'a> Process<'a> {
} }
/// Assert that the code contains no invalid constants. This ensures that, /// Assert that the code contains no invalid constants. This ensures that,
/// unless [interpreted::Clause::Constant]s are created procedurally, /// unless [Clause::Constant]s are created procedurally,
/// a [interpreter::RuntimeError::MissingSymbol] cannot be produced /// a [crate::interpreter::error::RunError::MissingSymbol] cannot be produced
pub fn validate_refs(&self) -> ProjectResult<()> { pub fn validate_refs(&self) -> ProjectResult<()> {
let mut errors = Vec::new(); let mut errors = Vec::new();
for key in self.symbols.keys() { for key in self.symbols.keys() {
@@ -66,45 +74,39 @@ impl<'a> Process<'a> {
} }
match errors.is_empty() { match errors.is_empty() {
true => Ok(()), true => Ok(()),
false => Err(MissingSymbols { errors }.rc()), false => Err(MissingSymbols { errors }.pack()),
} }
} }
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct MissingSymbol { struct MissingSymbol {
referrer: Sym, referrer: Sym,
location: Location, location: CodeLocation,
symbol: Sym, symbol: Sym,
} }
#[derive(Debug)] #[derive(Debug)]
pub struct MissingSymbols { struct MissingSymbols {
errors: Vec<MissingSymbol>, errors: Vec<MissingSymbol>,
} }
impl ProjectError for MissingSymbols { impl ProjectError for MissingSymbols {
fn description(&self) -> &str { const DESCRIPTION: &'static str = "A name not referring to a known symbol was found in the source after \
"A name not referring to a known symbol was found in the source after \
macro execution. This can either mean that a symbol name was mistyped, or \ macro execution. This can either mean that a symbol name was mistyped, or \
that macro execution didn't correctly halt." that macro execution didn't correctly halt.";
}
fn message(&self) -> String { fn message(&self) -> String {
format!( format!(
"The following symbols do not exist:\n{}", "The following symbols do not exist:\n{}",
(self.errors.iter()) (self.errors.iter())
.map(|e| format!( .map(|MissingSymbol { symbol, referrer, .. }| format!(
"{} referenced in {} ", "{symbol} referenced in {referrer}"
e.symbol.extern_vec().join("::"),
e.referrer.extern_vec().join("::")
)) ))
.join("\n") .join("\n")
) )
} }
fn positions(&self) -> crate::utils::BoxedIter<crate::error::ErrorPosition> { fn positions(&self) -> impl IntoIterator<Item = ErrorPosition> {
Box::new( (self.errors.iter())
(self.errors.clone().into_iter()) .map(|i| ErrorPosition { location: i.location.clone(), message: None })
.map(|i| ErrorPosition { location: i.location, message: None }),
)
} }
} }
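
To make the intended flow concrete, a hedged end-to-end sketch follows (not part of the diff; `consts`, `handlers` and `entrypoint` are assumed to come from merge_trees, the loader's handler tables and the caller respectively, and the gas limit is arbitrary):

fn run_entrypoint(
  consts: Vec<(Sym, NortConst)>,
  handlers: HandlerTable<'_>,
  entrypoint: Expr,
) -> Result<Halt, RunError> {
  let mut process = Process::new(consts, handlers);
  // Optional sanity check: every Constant clause must name a known symbol.
  process.validate_refs().expect("dangling constant reference");
  // A gas limit makes a non-terminating program return a partial result.
  process.run(entrypoint, Some(100_000))
}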


@@ -1,59 +1,41 @@
use hashbrown::HashMap;
use crate::error::{ErrorPosition, ProjectError}; use crate::error::{ErrorPosition, ProjectError};
use crate::interpreter::HandlerTable; use crate::gen::tree::ConstTree;
use crate::parse::{LexerPlugin, LineParser}; use crate::interpreter::handler::HandlerTable;
use crate::pipeline::file_loader::{IOResult, Loaded}; use crate::name::VName;
use crate::sourcefile::FileEntry; use crate::parse::lex_plugin::LexerPlugin;
use crate::utils::boxed_iter::box_empty; use crate::parse::parse_plugin::ParseLinePlugin;
use crate::utils::BoxedIter; use crate::pipeline::load_solution::Prelude;
use crate::{ConstTree, Interner, Tok, VName}; use crate::virt_fs::DeclTree;
/// A description of every point where an external library can hook into Orchid. /// A description of every point where an external library can hook into Orchid.
/// Intuitively, this can be thought of as a plugin /// Intuitively, this can be thought of as a plugin
pub struct System<'a> { pub struct System<'a> {
/// An identifier for the system used eg. in error reporting. /// An identifier for the system used eg. in error reporting.
pub name: Vec<String>, pub name: &'a str,
/// External functions and other constant values defined in AST form /// External functions and other constant values defined in AST form
pub constants: HashMap<Tok<String>, ConstTree>, pub constants: ConstTree,
/// Orchid libraries defined by this system /// Orchid libraries defined by this system
pub code: HashMap<VName, Loaded>, pub code: DeclTree,
/// Prelude lines to be added to **subsequent** systems and usercode to /// Prelude lines to be added to the head of files to expose the
/// expose the functionality of this system. The prelude is not added during /// functionality of this system. A glob import from the first path is
/// the loading of this system /// added to every file outside the prefix specified by the second path
pub prelude: Vec<FileEntry>, pub prelude: Vec<Prelude>,
/// Handlers for actions defined in this system /// Handlers for actions defined in this system
pub handlers: HandlerTable<'a>, pub handlers: HandlerTable<'a>,
/// Custom lexer for the source code representation atomic data. /// Custom lexer for the source code representation atomic data.
/// These take priority over builtin lexers so the syntax they /// These take priority over builtin lexers so the syntax they
/// match should be unambiguous /// match should be unambiguous
pub lexer_plugins: Vec<Box<dyn LexerPlugin>>, pub lexer_plugins: Vec<Box<dyn LexerPlugin + 'a>>,
/// Parser that processes custom line types into their representation in the /// Parser that processes custom line types into their representation in the
/// module tree /// module tree
pub line_parsers: Vec<Box<dyn LineParser>>, pub line_parsers: Vec<Box<dyn ParseLinePlugin>>,
} }
impl<'a> System<'a> { impl<'a> System<'a> {
/// Intern the name of the system so that it can be used as an Orchid /// Intern the name of the system so that it can be used as an Orchid
/// namespace /// namespace
#[must_use] #[must_use]
pub fn vname(&self, i: &Interner) -> VName { pub fn vname(&self) -> VName {
self.name.iter().map(|s| i.i(s)).collect::<Vec<_>>() VName::parse(self.name).expect("Systems must have a non-empty name")
}
/// Load a file from the system
pub fn load_file(
&self,
path: &[Tok<String>],
referrer: &[Tok<String>],
) -> IOResult {
(self.code.get(path)).cloned().ok_or_else(|| {
let err = MissingSystemCode {
path: path.to_vec(),
system: self.name.clone(),
referrer: referrer.to_vec(),
};
err.rc()
})
} }
} }
@@ -66,23 +48,22 @@ pub struct MissingSystemCode {
referrer: VName, referrer: VName,
} }
impl ProjectError for MissingSystemCode { impl ProjectError for MissingSystemCode {
fn description(&self) -> &str { const DESCRIPTION: &'static str =
"A system tried to import a path that doesn't exist" "A system tried to import a path that doesn't exist";
}
fn message(&self) -> String { fn message(&self) -> String {
format!( format!(
"Path {} imported by {} is not defined by {} or any system before it", "Path {} imported by {} is not defined by {} or any system before it",
Interner::extern_all(&self.path).join("::"), self.path,
Interner::extern_all(&self.referrer).join("::"), self.referrer,
self.system.join("::") self.system.join("::")
) )
} }
fn positions(&self) -> BoxedIter<ErrorPosition> { box_empty() } fn positions(&self) -> impl IntoIterator<Item = ErrorPosition> { [] }
} }
/// Trait for objects that can be converted into a [System] in the presence /// Trait for objects that can be converted into a [System] in the presence
/// of an [Interner]. /// of an [Interner].
pub trait IntoSystem<'a> { pub trait IntoSystem<'a> {
/// Convert this object into a system using an interner /// Convert this object into a system using an interner
fn into_system(self, i: &Interner) -> System<'a>; fn into_system(self) -> System<'a>;
} }
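
For orientation, a hedged sketch of a do-nothing system follows. The name is invented, and the empty `DeclTree` constructor is a guess by analogy with the `ConstTree` call used in the loader, so treat every constructor here as an assumption rather than confirmed API.

struct NoopSystem;
impl<'a> IntoSystem<'a> for NoopSystem {
  fn into_system(self) -> System<'a> {
    System {
      name: "demo::noop",
      constants: ConstTree::tree::<&str>([]), // no native constants
      code: DeclTree::tree::<&str>([]),       // assumed empty-tree constructor
      prelude: Vec::new(),
      handlers: HandlerTable::new(),
      lexer_plugins: Vec::new(),
      line_parsers: Vec::new(),
    }
  }
}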


@@ -1,45 +1,79 @@
use std::any::Any; use std::any::Any;
use std::fmt::Debug; use std::fmt::{Debug, Display};
use std::sync::{Arc, Mutex};
use dyn_clone::DynClone; use never::Never;
use super::XfnResult; use super::error::{ExternError, ExternResult};
use crate::ddispatch::request; use crate::interpreter::apply::CallData;
use crate::error::AssertionError; use crate::interpreter::context::RunContext;
use crate::interpreted::{ExprInst, TryFromExprInst}; use crate::interpreter::error::RunError;
use crate::interpreter::{Context, RuntimeError}; use crate::interpreter::nort;
use crate::representations::interpreted::Clause; use crate::interpreter::run::RunData;
use crate::utils::ddispatch::Responder; use crate::location::{CodeLocation, SourceRange};
use crate::{ast, NameLike}; use crate::name::NameLike;
use crate::parse::parsed;
use crate::utils::ddispatch::{request, Request, Responder};
/// Information returned by [Atomic::run]. This mirrors /// Information returned by [Atomic::run]. This mirrors
/// [crate::interpreter::Return] but with a clause instead of an Expr. /// [crate::interpreter::Return] but with a clause instead of an Expr.
pub struct AtomicReturn { pub struct AtomicReturn {
/// The next form of the expression /// The next form of the expression
pub clause: Clause, pub clause: nort::Clause,
/// Remaining gas /// Remaining gas
pub gas: Option<usize>, pub gas: Option<usize>,
/// Whether further normalization is possible by repeated calls to /// Whether further normalization is possible by repeated calls to
/// [Atomic::run] /// [Atomic::run]
pub inert: bool, pub inert: bool,
} }
impl AtomicReturn {
/// Report indicating that the value is inert
pub fn inert<T: Atomic, E>(this: T, ctx: RunContext) -> Result<Self, E> {
Ok(Self { clause: this.atom_cls(), gas: ctx.gas, inert: true })
}
/// Report indicating that the value has been processed
pub fn run<E>(clause: nort::Clause, run: RunData) -> Result<Self, E> {
Ok(Self { clause, gas: run.ctx.gas, inert: false })
}
}
/// Returned by [Atomic::run] /// Returned by [Atomic::run]
pub type AtomicResult = Result<AtomicReturn, RuntimeError>; pub type AtomicResult = Result<AtomicReturn, RunError>;
/// Trait for things that are _definitely_ equal. /// General error produced when a non-function [Atom] is applied to something as
pub trait StrictEq { /// a function.
/// must return true if the objects were produced via the exact same sequence #[derive(Clone)]
/// of transformations, including any relevant context data. Must return false pub struct NotAFunction(pub nort::Expr);
/// if the objects are of different type, or if their type is [PartialEq] impl ExternError for NotAFunction {}
/// and [PartialEq::eq] returns false. impl Display for NotAFunction {
fn strict_eq(&self, other: &dyn Any) -> bool; fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?} is not a function", self.0)
}
} }
/// Functionality the interpreter needs to handle a value /// Functionality the interpreter needs to handle a value
pub trait Atomic: Any + Debug + DynClone + StrictEq + Responder + Send ///
where /// # Lifecycle methods
Self: 'static, ///
/// Atomics expose the methods [Atomic::redirect], [Atomic::run],
/// [Atomic::apply] and [Atomic::apply_ref] to interact with the interpreter.
/// The interpreter first tries to call `redirect` to find a subexpression to
/// normalize. If it returns `None` or the subexpression is inert, `run` is
/// called. `run` takes ownership of the value and returns a new one.
///
/// If `run` indicated in its return value that the result is inert and the atom
/// is in the position of a function, `apply` or `apply_ref` is called depending
/// upon whether the atom is referenced elsewhere. `apply` falls back to
/// `apply_ref` so implementing it is considered an optimization to avoid
/// excessive copying.
///
/// Atoms don't generally have to be copyable because clauses are refcounted in
/// the interpreter, but Orchid code is always free to duplicate the references
/// and apply them as functions to multiple different arguments so atoms that
/// represent functions have to support application by-ref without consuming the
/// function itself.
pub trait Atomic: Any + Debug + Responder + Send
where Self: 'static
{ {
/// Casts this value to [Any] so that its original value can be salvaged /// Casts this value to [Any] so that its original value can be salvaged
/// during introspection by other external code. /// during introspection by other external code.
@@ -55,45 +89,90 @@ where
#[must_use] #[must_use]
fn as_any_ref(&self) -> &dyn Any; fn as_any_ref(&self) -> &dyn Any;
/// Returns a reference to a possible expression held inside the atom which
/// can be reduced. For an overview of the lifecycle see [Atomic]
fn redirect(&mut self) -> Option<&mut nort::ClauseInst>;
/// Attempt to normalize this value. If it wraps a value, this should report /// Attempt to normalize this value. If it wraps a value, this should report
/// inert. If it wraps a computation, it should execute one logical step of /// inert. If it wraps a computation, it should execute one logical step of
/// the computation and return a structure representing the ntext. /// the computation and return a structure representing the next.
fn run(self: Box<Self>, ctx: Context) -> AtomicResult; ///
/// For an overview of the lifecycle see [Atomic]
fn run(self: Box<Self>, run: RunData) -> AtomicResult;
/// Combine the function with an argument to produce a new clause. Falls back
/// to [Atomic::apply_ref] by default.
///
/// For an overview of the lifecycle see [Atomic]
fn apply(self: Box<Self>, call: CallData) -> ExternResult<nort::Clause> {
self.apply_ref(call)
}
/// Combine the function with an argument to produce a new clause
///
/// For an overview of the lifecycle see [Atomic]
fn apply_ref(&self, call: CallData) -> ExternResult<nort::Clause>;
/// Must return true for atoms parsed from identical source.
/// If the atom cannot be parsed from source, it can safely be ignored
#[allow(unused_variables)]
fn parser_eq(&self, other: &dyn Any) -> bool { false }
/// Wrap the atom in a clause to be placed in an [AtomicResult]. /// Wrap the atom in a clause to be placed in an [AtomicResult].
#[must_use] #[must_use]
fn atom_cls(self) -> Clause fn atom_cls(self) -> nort::Clause
where where Self: Sized {
Self: Sized, nort::Clause::Atom(Atom(Box::new(self)))
{ }
Clause::Atom(Atom(Box::new(self)))
/// Shorthand for `self.atom_cls().to_inst()`
fn atom_clsi(self) -> nort::ClauseInst
where Self: Sized {
self.atom_cls().to_inst()
} }
/// Wrap the atom in a new expression instance to be placed in a tree /// Wrap the atom in a new expression instance to be placed in a tree
#[must_use] #[must_use]
fn atom_exi(self) -> ExprInst fn atom_expr(self, location: CodeLocation) -> nort::Expr
where where Self: Sized {
Self: Sized, self.atom_clsi().to_expr(location)
{
self.atom_cls().wrap()
} }
/// Wrap the atom in a clause to be placed in a [sourcefile::FileEntry]. /// Wrap the atom in a clause to be placed in a [sourcefile::FileEntry].
#[must_use] #[must_use]
fn ast_cls<N: NameLike>(self) -> ast::Clause<N> fn ast_cls(self) -> parsed::Clause
where where Self: Sized + Clone {
Self: Sized, parsed::Clause::Atom(AtomGenerator::cloner(self))
{
ast::Clause::Atom(Atom::new(self))
} }
/// Wrap the atom in an expression to be placed in a [sourcefile::FileEntry]. /// Wrap the atom in an expression to be placed in a [sourcefile::FileEntry].
#[must_use] #[must_use]
fn ast_exp<N: NameLike>(self) -> ast::Expr<N> fn ast_exp<N: NameLike>(self, range: SourceRange) -> parsed::Expr
where where Self: Sized + Clone {
Self: Sized, self.ast_cls().into_expr(range)
{ }
self.ast_cls().into_expr() }
/// A struct for generating any number of [Atom]s. Since atoms aren't Clone,
/// this represents the ability to create any number of instances of an atom
#[derive(Clone)]
pub struct AtomGenerator(Arc<dyn Fn() -> Atom + Send + Sync>);
impl AtomGenerator {
/// Use a factory function to create any number of atoms
pub fn new(f: impl Fn() -> Atom + Send + Sync + 'static) -> Self {
Self(Arc::new(f))
}
/// Clone a representative atom when called
pub fn cloner(atom: impl Atomic + Clone) -> Self {
let lock = Mutex::new(atom);
Self::new(move || Atom::new(lock.lock().unwrap().clone()))
}
/// Generate an atom
pub fn run(&self) -> Atom { self.0() }
}
impl Debug for AtomGenerator {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("AtomGenerator").finish_non_exhaustive()
} }
} }
@@ -123,7 +202,7 @@ impl Atom {
*self.0.as_any().downcast().expect("Type mismatch on Atom::cast") *self.0.as_any().downcast().expect("Type mismatch on Atom::cast")
} }
/// Normalize the contained data /// Normalize the contained data
pub fn run(self, ctx: Context) -> AtomicResult { self.0.run(ctx) } pub fn run(self, run: RunData) -> AtomicResult { self.0.run(run) }
/// Request a delegate from the encapsulated data /// Request a delegate from the encapsulated data
pub fn request<T: 'static>(&self) -> Option<T> { request(self.0.as_ref()) } pub fn request<T: 'static>(&self) -> Option<T> { request(self.0.as_ref()) }
/// Downcast the atom to a concrete atomic type, or return the original atom /// Downcast the atom to a concrete atomic type, or return the original atom
@@ -134,10 +213,18 @@ impl Atom {
false => Err(self), false => Err(self),
} }
} }
} /// Downcast an atom by reference
pub fn downcast_ref<T: Atomic>(&self) -> Option<&T> {
impl Clone for Atom { self.0.as_any_ref().downcast_ref()
fn clone(&self) -> Self { Self(dyn_clone::clone_box(self.data())) } }
/// Combine the function with an argument to produce a new clause
pub fn apply(self, call: CallData) -> ExternResult<nort::Clause> {
self.0.apply(call)
}
/// Combine the function with an argument to produce a new clause
pub fn apply_ref(&self, call: CallData) -> ExternResult<nort::Clause> {
self.0.apply_ref(call)
}
} }
impl Debug for Atom { impl Debug for Atom {
@@ -146,12 +233,15 @@ impl Debug for Atom {
} }
} }
impl TryFromExprInst for Atom { impl Responder for Never {
fn from_exi(exi: ExprInst) -> XfnResult<Self> { fn respond(&self, _request: Request) { match *self {} }
let loc = exi.location(); }
match exi.expr_val().clause { impl Atomic for Never {
Clause::Atom(a) => Ok(a), fn as_any(self: Box<Self>) -> Box<dyn Any> { match *self {} }
_ => AssertionError::fail(loc, "atom"), fn as_any_ref(&self) -> &dyn Any { match *self {} }
} fn redirect(&mut self) -> Option<&mut nort::ClauseInst> { match *self {} }
fn run(self: Box<Self>, _: RunData) -> AtomicResult { match *self {} }
fn apply_ref(&self, _: CallData) -> ExternResult<nort::Clause> {
match *self {}
} }
} }
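
The lifecycle described above is easiest to see on a trivial inert atom. The sketch below is not part of the diff and the name is invented; it stores a number, never redirects, reports itself inert when run, and rejects application the same way `CPSBox` does further down.

#[derive(Debug, Clone)]
struct IntAtom(i64);
impl Responder for IntAtom {}
impl Atomic for IntAtom {
  fn as_any(self: Box<Self>) -> Box<dyn Any> { self }
  fn as_any_ref(&self) -> &dyn Any { self }
  // Nothing inside this atom can be normalized further.
  fn redirect(&mut self) -> Option<&mut nort::ClauseInst> { None }
  fn run(self: Box<Self>, run: RunData) -> AtomicResult {
    AtomicReturn::inert(*self, run.ctx)
  }
  // Numbers are not functions, so application is an external error.
  fn apply_ref(&self, call: CallData) -> ExternResult<nort::Clause> {
    Err(NotAFunction(self.clone().atom_expr(call.location.clone())).rc())
  }
}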


@@ -4,99 +4,97 @@ use std::fmt::Debug;
use trait_set::trait_set; use trait_set::trait_set;
use super::{Atomic, ExternFn, InertAtomic, XfnResult}; use super::atom::{Atomic, AtomicResult, AtomicReturn, NotAFunction};
use crate::interpreted::{Clause, ExprInst}; use super::error::{ExternError, ExternResult};
use crate::interpreter::{Context, HandlerRes}; use crate::interpreter::apply::CallData;
use crate::interpreter::nort::{Clause, ClauseInst, Expr};
use crate::interpreter::run::RunData;
use crate::location::CodeLocation;
use crate::utils::ddispatch::{Request, Responder};
use crate::utils::pure_seq::pushed_ref; use crate::utils::pure_seq::pushed_ref;
use crate::ConstTree;
trait_set! { trait_set! {
/// A "well behaved" type that can be used as payload in a CPS box /// A "well behaved" type that can be used as payload in a CPS box
pub trait CPSPayload = Clone + Debug + Send + 'static; pub trait CPSPayload = Clone + Debug + Send + 'static;
/// A function to handle a CPS box with a specific payload /// A function to handle a CPS box with a specific payload
pub trait CPSHandler<T: CPSPayload> = FnMut(&T, &ExprInst) -> HandlerRes; pub trait CPSHandler<T: CPSPayload> = FnMut(&T, &Expr) -> ExternResult<Expr>;
} }
/// The pre-argument version of CPSBox /// An Orchid Atom value encapsulating a payload and continuation points
#[derive(Debug, Clone)]
struct CPSFn<T: CPSPayload> {
pub argc: usize,
pub continuations: Vec<ExprInst>,
pub payload: T,
}
impl<T: CPSPayload> CPSFn<T> {
#[must_use]
fn new(argc: usize, payload: T) -> Self {
debug_assert!(
argc > 0,
"Null-ary CPS functions are invalid, use an Atom instead"
);
Self { argc, continuations: Vec::new(), payload }
}
}
impl<T: CPSPayload> ExternFn for CPSFn<T> {
fn name(&self) -> &str { "CPS function without argument" }
fn apply(self: Box<Self>, arg: ExprInst, _ctx: Context) -> XfnResult<Clause> {
let payload = self.payload.clone();
let continuations = pushed_ref(&self.continuations, arg);
if self.argc == 1 {
Ok(CPSBox { payload, continuations }.atom_cls())
} else {
Ok(CPSFn { argc: self.argc - 1, payload, continuations }.xfn_cls())
}
}
}
/// An inert Orchid Atom value encapsulating a payload and a continuation point
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct CPSBox<T: CPSPayload> { pub struct CPSBox<T: CPSPayload> {
/// Number of arguments not provided yet
pub argc: usize,
/// Details about the command /// Details about the command
pub payload: T, pub payload: T,
/// Possible continuations, in the order they were provided /// Possible continuations, in the order they were provided
pub continuations: Vec<ExprInst>, pub continuations: Vec<Expr>,
} }
impl<T: CPSPayload> CPSBox<T> { impl<T: CPSPayload> CPSBox<T> {
/// Create a new command prepared to receive exacly N continuations
#[must_use]
pub fn new(argc: usize, payload: T) -> Self {
debug_assert!(argc > 0, "Null-ary CPS functions are invalid");
Self { argc, continuations: Vec::new(), payload }
}
/// Unpack the wrapped command and the continuation /// Unpack the wrapped command and the continuation
#[must_use] #[must_use]
pub fn unpack1(self) -> (T, ExprInst) { pub fn unpack1(&self) -> (&T, Expr) {
let [cont]: [ExprInst; 1] = match &self.continuations[..] {
self.continuations.try_into().expect("size checked"); [cont] => (&self.payload, cont.clone()),
(self.payload, cont) _ => panic!("size mismatch"),
}
} }
/// Unpack the wrapped command and 2 continuations (usually an async and a /// Unpack the wrapped command and 2 continuations (usually an async and a
/// sync) /// sync)
#[must_use] #[must_use]
pub fn unpack2(self) -> (T, ExprInst, ExprInst) { pub fn unpack2(&self) -> (&T, Expr, Expr) {
let [c1, c2]: [ExprInst; 2] = match &self.continuations[..] {
self.continuations.try_into().expect("size checked"); [c1, c2] => (&self.payload, c1.clone(), c2.clone()),
(self.payload, c1, c2) _ => panic!("size mismatch"),
}
} }
/// Unpack the wrapped command and 3 continuations (usually an async success, /// Unpack the wrapped command and 3 continuations (usually an async success,
/// an async fail and a sync) /// an async fail and a sync)
#[must_use] #[must_use]
pub fn unpack3(self) -> (T, ExprInst, ExprInst, ExprInst) { pub fn unpack3(&self) -> (&T, Expr, Expr, Expr) {
let [c1, c2, c3]: [ExprInst; 3] = match &self.continuations[..] {
self.continuations.try_into().expect("size checked"); [c1, c2, c3] => (&self.payload, c1.clone(), c2.clone(), c3.clone()),
(self.payload, c1, c2, c3) _ => panic!("size mismatch"),
}
}
fn assert_applicable(&self, err_loc: &CodeLocation) -> ExternResult<()> {
match self.argc {
0 => Err(NotAFunction(self.clone().atom_expr(err_loc.clone())).rc()),
_ => Ok(()),
}
} }
} }
impl<T: CPSPayload> Responder for CPSBox<T> {
impl<T: CPSPayload> InertAtomic for CPSBox<T> { fn respond(&self, _request: Request) {}
fn type_str() -> &'static str { "a CPS box" }
} }
impl<T: CPSPayload> Atomic for CPSBox<T> {
/// Like [init_cps] but wrapped in a [ConstTree] for init-time usage fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
#[must_use] fn as_any_ref(&self) -> &dyn std::any::Any { self }
pub fn const_cps<T: CPSPayload>(argc: usize, payload: T) -> ConstTree { fn parser_eq(&self, _: &dyn std::any::Any) -> bool { false }
ConstTree::xfn(CPSFn::new(argc, payload)) fn redirect(&mut self) -> Option<&mut ClauseInst> { None }
} fn run(self: Box<Self>, run: RunData) -> AtomicResult {
AtomicReturn::inert(*self, run.ctx)
/// Construct a CPS function which takes an argument and then acts inert }
/// so that command executors can receive it. fn apply(mut self: Box<Self>, call: CallData) -> ExternResult<Clause> {
/// self.assert_applicable(&call.location)?;
/// This function is meant to be used in an external function defined with self.argc -= 1;
/// [crate::define_fn]. For usage in a [ConstTree], see [mk_const] self.continuations.push(call.arg);
#[must_use] Ok(self.atom_cls())
pub fn init_cps<T: CPSPayload>(argc: usize, payload: T) -> Clause { }
CPSFn::new(argc, payload).xfn_cls() fn apply_ref(&self, call: CallData) -> ExternResult<Clause> {
self.assert_applicable(&call.location)?;
let new = Self {
argc: self.argc - 1,
continuations: pushed_ref(&self.continuations, call.arg),
payload: self.payload.clone(),
};
Ok(new.atom_cls())
}
} }
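
A hedged example of how a command payload travels through a CPS box (names invented; registration of the handler with a `HandlerTable` is omitted because that API is not shown in this diff):

#[derive(Debug, Clone)]
struct Print(String);

// An external function would hand `CPSBox::new(1, Print(text)).atom_cls()` to
// the interpreter; once the box surfaces as an inert atom, the embedder's
// handler unpacks it and resumes the single continuation.
fn handle_print(cmd: &CPSBox<Print>) -> Expr {
  let (Print(text), resume) = cmd.unpack1();
  println!("{text}");
  resume
}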

src/foreign/error.rs (new file, 68 lines)

@@ -0,0 +1,68 @@
use std::error::Error;
use std::fmt::{Debug, Display};
use std::sync::Arc;
use dyn_clone::DynClone;
use crate::location::CodeLocation;
/// Errors produced by external code
pub trait ExternError: Display + Send + Sync + DynClone {
/// Convert into trait object
#[must_use]
fn rc(self) -> Arc<dyn ExternError>
where Self: 'static + Sized {
Arc::new(self)
}
}
impl Debug for dyn ExternError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ExternError({self})")
}
}
impl Error for dyn ExternError {}
/// An error produced by Rust code called from Orchid. The error is type-erased.
pub type ExternResult<T> = Result<T, Arc<dyn ExternError>>;
/// Some expectation (usually about the argument types of a function) did not
/// hold.
#[derive(Clone)]
pub struct AssertionError {
location: CodeLocation,
message: &'static str,
details: String,
}
impl AssertionError {
/// Construct, upcast and wrap in a Result that never succeeds for easy
/// short-circuiting
pub fn fail<T>(
location: CodeLocation,
message: &'static str,
details: String,
) -> ExternResult<T> {
Err(Self::ext(location, message, details))
}
/// Construct and upcast to [ExternError]
pub fn ext(
location: CodeLocation,
message: &'static str,
details: String,
) -> Arc<dyn ExternError> {
Self { location, message, details }.rc()
}
}
impl Display for AssertionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Error: expected {}", self.message)?;
write!(f, " at {}", self.location)?;
write!(f, " details: {}", self.details)
}
}
impl ExternError for AssertionError {}
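
As a hedged illustration of the intended use, an external function might reject a badly typed argument like this (the function, its argument types and the messages are invented for the example):

fn expect_positive(n: i64, location: CodeLocation) -> ExternResult<i64> {
  if n > 0 {
    Ok(n)
  } else {
    // Short-circuit with a type-erased ExternError.
    AssertionError::fail(location, "a positive number", format!("got {n}"))
  }
}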


@@ -1,94 +0,0 @@
use std::error::Error;
use std::fmt::{Debug, Display};
use std::hash::Hash;
use std::sync::Arc;
use dyn_clone::{clone_box, DynClone};
use super::XfnResult;
use crate::interpreted::ExprInst;
use crate::interpreter::Context;
use crate::representations::interpreted::Clause;
use crate::{ast, NameLike};
/// Errors produced by external code
pub trait ExternError: Display + Send + Sync + DynClone {
/// Convert into trait object
#[must_use]
fn into_extern(self) -> Arc<dyn ExternError>
where
Self: 'static + Sized,
{
Arc::new(self)
}
}
impl Debug for dyn ExternError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{self}")
}
}
impl Error for dyn ExternError {}
/// Represents an externally defined function from the perspective of
/// the executor. Since Orchid lacks basic numerical operations,
/// these are also external functions.
pub trait ExternFn: DynClone + Send {
/// Display name of the function
#[must_use]
fn name(&self) -> &str;
/// Combine the function with an argument to produce a new clause
fn apply(self: Box<Self>, arg: ExprInst, ctx: Context) -> XfnResult<Clause>;
/// Hash the name to get a somewhat unique hash.
fn hash(&self, mut state: &mut dyn std::hash::Hasher) {
self.name().hash(&mut state)
}
/// Wrap this function in a clause to be placed in an [AtomicResult].
#[must_use]
fn xfn_cls(self) -> Clause
where
Self: Sized + 'static,
{
Clause::ExternFn(ExFn(Box::new(self)))
}
/// Wrap this function in a clause to be placed in a [FileEntry].
#[must_use]
fn xfn_ast_cls<N: NameLike>(self) -> ast::Clause<N>
where
Self: Sized + 'static,
{
ast::Clause::ExternFn(ExFn(Box::new(self)))
}
}
impl Eq for dyn ExternFn {}
impl PartialEq for dyn ExternFn {
fn eq(&self, other: &Self) -> bool { self.name() == other.name() }
}
impl Hash for dyn ExternFn {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.name().hash(state)
}
}
impl Debug for dyn ExternFn {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "##EXTERN[{}]##", self.name())
}
}
/// Represents a black box function that can be applied to a [Clause] to produce
/// a new [Clause], typically an [Atom] representing external work, a new [ExFn]
/// to take additional arguments, or an Orchid tree to return control to the
/// interpreter
#[derive(Debug)]
pub struct ExFn(pub Box<dyn ExternFn + 'static>);
impl ExFn {
/// Combine the function with an argument to produce a new clause
pub fn apply(self, arg: ExprInst, ctx: Context) -> XfnResult<Clause> {
self.0.apply(arg, ctx)
}
}
impl Clone for ExFn {
fn clone(&self) -> Self { Self(clone_box(self.0.as_ref())) }
}


@@ -1,47 +1,16 @@
use std::any::{Any, TypeId};
use std::fmt::Debug; use std::fmt::Debug;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::sync::Arc;
use super::atom::StrictEq; use super::atom::{Atomic, AtomicResult, AtomicReturn};
use super::{ use super::error::ExternResult;
Atomic, AtomicResult, AtomicReturn, ExternError, ExternFn, XfnResult, use super::to_clause::ToClause;
}; use super::try_from_expr::TryFromExpr;
use crate::ddispatch::Responder; use crate::interpreter::apply::CallData;
use crate::interpreted::{Clause, ExprInst, TryFromExprInst}; use crate::interpreter::context::Halt;
use crate::interpreter::{run, Context, Return}; use crate::interpreter::nort::{Clause, ClauseInst, Expr};
use crate::systems::codegen::{opt, res}; use crate::interpreter::run::{run, RunData};
use crate::OrcString; use crate::utils::ddispatch::Responder;
/// A trait for things that are infallibly convertible to [Clause]. These types
/// can be returned by callbacks passed to the [super::xfn_1ary] family of
/// functions.
pub trait ToClause: Clone {
/// Convert the type to a [Clause].
fn to_clause(self) -> Clause;
/// Convert to an expression instance via [ToClause].
fn to_exi(self) -> ExprInst { self.to_clause().wrap() }
}
impl<T: Atomic + Clone> ToClause for T {
fn to_clause(self) -> Clause { self.atom_cls() }
}
impl ToClause for Clause {
fn to_clause(self) -> Clause { self }
}
impl ToClause for ExprInst {
fn to_clause(self) -> Clause { self.expr_val().clause }
}
impl ToClause for String {
fn to_clause(self) -> Clause { OrcString::from(self).atom_cls() }
}
impl<T: ToClause> ToClause for Option<T> {
fn to_clause(self) -> Clause { opt(self.map(|t| t.to_clause().wrap())) }
}
impl<T: ToClause, U: ToClause> ToClause for Result<T, U> {
fn to_clause(self) -> Clause {
res(self.map(|t| t.to_clause().wrap()).map_err(|u| u.to_clause().wrap()))
}
}
/// Return a unary lambda wrapped in this struct to take an additional argument /// Return a unary lambda wrapped in this struct to take an additional argument
/// in a function passed to Orchid through a member of the [super::xfn_1ary] /// in a function passed to Orchid through a member of the [super::xfn_1ary]
@@ -51,6 +20,11 @@ impl<T: ToClause, U: ToClause> ToClause for Result<T, U> {
/// type. Rust functions are never overloaded, but inexplicably the [Fn] traits /// type. Rust functions are never overloaded, but inexplicably the [Fn] traits
/// take the argument tuple as a generic parameter which means that it cannot /// take the argument tuple as a generic parameter which means that it cannot
/// be a unique dispatch target. /// be a unique dispatch target.
///
/// If the function takes an instance of [Lazy], it will contain the expression
/// the function was applied to without any specific normalization. If it takes
/// any other type, the argument will be fully normalized and cast using the
/// type's [TryFromExpr] impl.
pub struct Param<T, U, F> { pub struct Param<T, U, F> {
data: F, data: F,
_t: PhantomData<T>, _t: PhantomData<T>,
@@ -60,9 +34,7 @@ unsafe impl<T, U, F: Send> Send for Param<T, U, F> {}
impl<T, U, F> Param<T, U, F> { impl<T, U, F> Param<T, U, F> {
/// Wrap a new function in a parametric struct /// Wrap a new function in a parametric struct
pub fn new(f: F) -> Self pub fn new(f: F) -> Self
where where F: FnOnce(T) -> U {
F: FnOnce(T) -> Result<U, Arc<dyn ExternError>>,
{
Self { data: f, _t: PhantomData, _u: PhantomData } Self { data: f, _t: PhantomData, _u: PhantomData }
} }
/// Take out the function /// Take out the function
@@ -74,75 +46,91 @@ impl<T, U, F: Clone> Clone for Param<T, U, F> {
} }
} }
impl< /// A marker struct that gets assigned an expression without normalizing it.
T: 'static + TryFromExprInst, /// This behaviour cannot be replicated in usercode, it's implemented with an
U: 'static + ToClause, /// explicit runtime [TypeId] check invoked by [Param].
F: 'static + Clone + Send + FnOnce(T) -> Result<U, Arc<dyn ExternError>>, #[derive(Debug, Clone)]
> ToClause for Param<T, U, F> pub struct Thunk(pub Expr);
{ impl TryFromExpr for Thunk {
fn to_clause(self) -> Clause { self.xfn_cls() } fn from_expr(expr: Expr) -> ExternResult<Self> { Ok(Thunk(expr)) }
} }
struct FnMiddleStage<T, U, F> { struct FnMiddleStage<T, U, F> {
argument: ExprInst, arg: Expr,
f: Param<T, U, F>, f: Param<T, U, F>,
} }
impl<T, U, F> StrictEq for FnMiddleStage<T, U, F> {
fn strict_eq(&self, _other: &dyn std::any::Any) -> bool {
unimplemented!("This should never be able to appear in a pattern")
}
}
impl<T, U, F: Clone> Clone for FnMiddleStage<T, U, F> { impl<T, U, F: Clone> Clone for FnMiddleStage<T, U, F> {
fn clone(&self) -> Self { fn clone(&self) -> Self { Self { arg: self.arg.clone(), f: self.f.clone() } }
Self { argument: self.argument.clone(), f: self.f.clone() }
}
} }
impl<T, U, F> Debug for FnMiddleStage<T, U, F> { impl<T, U, F> Debug for FnMiddleStage<T, U, F> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("FnMiddleStage") f.debug_struct("FnMiddleStage")
.field("argument", &self.argument) .field("argument", &self.arg)
.finish_non_exhaustive() .finish_non_exhaustive()
} }
} }
impl<T, U, F> Responder for FnMiddleStage<T, U, F> {} impl<T, U, F> Responder for FnMiddleStage<T, U, F> {}
impl< impl<
T: 'static + TryFromExprInst, T: 'static + TryFromExpr,
U: 'static + ToClause, U: 'static + ToClause,
F: 'static + Clone + FnOnce(T) -> Result<U, Arc<dyn ExternError>> + Send, F: 'static + Clone + FnOnce(T) -> U + Any + Send,
> Atomic for FnMiddleStage<T, U, F> > Atomic for FnMiddleStage<T, U, F>
{ {
fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self } fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
fn as_any_ref(&self) -> &dyn std::any::Any { self } fn as_any_ref(&self) -> &dyn std::any::Any { self }
fn run(self: Box<Self>, ctx: Context) -> AtomicResult { fn redirect(&mut self) -> Option<&mut ClauseInst> {
let Return { gas, inert, state } = run(self.argument, ctx)?; // this should be ctfe'd
let clause = match inert { (TypeId::of::<T>() != TypeId::of::<Thunk>()).then(|| &mut self.arg.clause)
false => state.expr_val().clause, }
true => (self.f.data)(state.downcast()?)?.to_clause(), fn run(self: Box<Self>, r: RunData) -> AtomicResult {
}; let Self { arg, f: Param { data: f, .. } } = *self;
Ok(AtomicReturn { gas, inert: false, clause }) let clause = f(arg.downcast()?).to_clause(r.location);
Ok(AtomicReturn { gas: r.ctx.gas, inert: false, clause })
}
fn apply_ref(&self, _: CallData) -> ExternResult<Clause> {
panic!("Atom should have decayed")
}
}
impl<T, U, F> Responder for Param<T, U, F> {}
impl<T, U, F> Debug for Param<T, U, F> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Param")
} }
} }
impl< impl<
T: 'static + TryFromExprInst, T: 'static + TryFromExpr + Clone,
U: 'static + ToClause, U: 'static + ToClause,
F: 'static + Clone + Send + FnOnce(T) -> Result<U, Arc<dyn ExternError>>, F: 'static + Clone + Send + FnOnce(T) -> U,
> ExternFn for Param<T, U, F> > Atomic for Param<T, U, F>
{ {
fn name(&self) -> &str { "anonymous Rust function" } fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
fn apply(self: Box<Self>, arg: ExprInst, _: Context) -> XfnResult<Clause> { fn as_any_ref(&self) -> &dyn std::any::Any { self }
Ok(FnMiddleStage { argument: arg, f: *self }.atom_cls()) fn redirect(&mut self) -> Option<&mut ClauseInst> { None }
fn run(self: Box<Self>, r: RunData) -> AtomicResult {
AtomicReturn::inert(*self, r.ctx)
}
fn apply_ref(&self, call: CallData) -> ExternResult<Clause> {
Ok(FnMiddleStage { arg: call.arg, f: self.clone() }.atom_cls())
}
fn apply(self: Box<Self>, call: CallData) -> ExternResult<Clause> {
Ok(FnMiddleStage { arg: call.arg, f: *self }.atom_cls())
} }
} }
/// Conversion functions from [Fn] traits into [Atomic]. Since Rust's type
/// system allows overloaded [Fn] implementations, we must specify the arity and
/// argument types for this process. Arities are only defined up to 9, but the
/// function can always return another call to `xfn_`N`ary` to consume more
/// arguments.
pub mod constructors { pub mod constructors {
use super::super::atom::Atomic;
use super::super::try_from_expr::TryFromExpr;
use std::sync::Arc; #[allow(unused)] // for doc
use super::Thunk;
use super::{Param, ToClause}; use super::{Param, ToClause};
use crate::foreign::{ExternError, ExternFn};
use crate::interpreted::TryFromExprInst;
macro_rules! xfn_variant { macro_rules! xfn_variant {
( (
@@ -154,18 +142,21 @@ pub mod constructors {
#[doc = "Convert a function of " $number " argument(s) into a curried" #[doc = "Convert a function of " $number " argument(s) into a curried"
" Orchid function. See also Constraints summarized:\n\n" " Orchid function. See also Constraints summarized:\n\n"
"- the callback must live as long as `'static`\n" "- the callback must live as long as `'static`\n"
"- All arguments must implement [TryFromExprInst]\n" "- All arguments must implement [TryFromExpr]\n"
"- all but the last argument must implement [Clone] and [Send]\n" "- all but the last argument must implement [Clone] and [Send]\n"
"- the return type must implement [ToClause].\n\n" "- the return type must implement [ToClause].\n\n"
] ]
#[doc = "Take [Lazy] to take the argument as-is,\n"
"without normalization\n\n"
]
#[doc = "Other arities: " $( "[xfn_" $alt "ary], " )+ ] #[doc = "Other arities: " $( "[xfn_" $alt "ary], " )+ ]
pub fn [< xfn_ $number ary >] < pub fn [< xfn_ $number ary >] <
$( $t : TryFromExprInst + Clone + Send + 'static, )* $( $t : TryFromExpr + Clone + Send + 'static, )*
TLast: TryFromExprInst + 'static, TLast: TryFromExpr + Clone + 'static,
TReturn: ToClause + Send + 'static, TReturn: ToClause + Send + 'static,
TFunction: FnOnce( $( $t , )* TLast ) TFunction: FnOnce( $( $t , )* TLast )
-> Result<TReturn, Arc<dyn ExternError>> + Clone + Send + 'static -> TReturn + Clone + Send + 'static
>(function: TFunction) -> impl ExternFn { >(function: TFunction) -> impl Atomic + Clone {
xfn_variant!(@BODY_LOOP function xfn_variant!(@BODY_LOOP function
( $( ( $t [< $t:lower >] ) )* ) ( $( ( $t [< $t:lower >] ) )* )
( $( [< $t:lower >] )* ) ( $( [< $t:lower >] )* )
@@ -178,7 +169,7 @@ pub mod constructors {
$( ( $T:ident $t:ident ) )* $( ( $T:ident $t:ident ) )*
) $full:tt) => { ) $full:tt) => {
Param::new(|$next : $Next| { Param::new(|$next : $Next| {
Ok(xfn_variant!(@BODY_LOOP $function ( $( ( $T $t ) )* ) $full)) xfn_variant!(@BODY_LOOP $function ( $( ( $T $t ) )* ) $full)
}) })
}; };
(@BODY_LOOP $function:ident () ( $( $t:ident )* )) => { (@BODY_LOOP $function:ident () ( $( $t:ident )* )) => {

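Below is a minimal usage sketch of the bridge as it stands after this change. The module paths (crate::foreign::fn_bridge::constructors, crate::foreign::inert) and the availability of xfn_2ary are assumed from this diff rather than verified against the crate root.

// Sketch only: paths and bounds assumed from the fn_bridge diff above.
use crate::foreign::atom::Atomic;
use crate::foreign::fn_bridge::constructors::xfn_2ary;
use crate::foreign::fn_bridge::Thunk;
use crate::foreign::inert::Inert;

// Both arguments are normalized and downcast through TryFromExpr before the
// callback runs; the Inert return value is converted back through ToClause.
pub fn add() -> impl Atomic + Clone {
  xfn_2ary(|a: Inert<usize>, b: Inert<usize>| Inert(a.0 + b.0))
}

// A Thunk parameter receives the argument expression unevaluated; this is what
// the runtime TypeId check in FnMiddleStage::redirect enables.
pub fn first() -> impl Atomic + Clone {
  xfn_2ary(|a: Inert<usize>, _rest: Thunk| a)
}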
View File

@@ -0,0 +1,172 @@
use std::any::Any;
use std::fmt::Debug;
use std::sync::Arc;
use super::atom::{Atom, Atomic, AtomicResult};
use super::error::{ExternError, ExternResult};
use super::process::Unstable;
use super::to_clause::ToClause;
use crate::gen::tpl;
use crate::gen::traits::Gen;
use crate::interpreter::apply::CallData;
use crate::interpreter::error::RunError;
use crate::interpreter::gen_nort::nort_gen;
use crate::interpreter::nort::{Clause, ClauseInst};
use crate::interpreter::run::RunData;
use crate::location::CodeLocation;
use crate::utils::clonable_iter::Clonable;
use crate::utils::ddispatch::Responder;
impl<T: ToClause> ToClause for Option<T> {
fn to_clause(self, location: CodeLocation) -> Clause {
let ctx = nort_gen(location.clone());
match self {
None => tpl::C("std::option::none").template(ctx, []),
Some(t) => tpl::A(tpl::C("std::option::some"), tpl::Slot)
.template(ctx, [t.to_clause(location)]),
}
}
}
impl<T: ToClause, U: ToClause> ToClause for Result<T, U> {
fn to_clause(self, location: CodeLocation) -> Clause {
let ctx = nort_gen(location.clone());
match self {
Ok(t) => tpl::A(tpl::C("std::result::ok"), tpl::Slot)
.template(ctx, [t.to_clause(location)]),
Err(e) => tpl::A(tpl::C("std::result::err"), tpl::Slot)
.template(ctx, [e.to_clause(location)]),
}
}
}
struct PendingError(Arc<dyn ExternError>);
impl Responder for PendingError {}
impl Debug for PendingError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "PendingError({})", self.0)
}
}
impl Atomic for PendingError {
fn as_any(self: Box<Self>) -> Box<dyn Any> { self }
fn as_any_ref(&self) -> &dyn Any { self }
fn redirect(&mut self) -> Option<&mut ClauseInst> { None }
fn run(self: Box<Self>, _: RunData) -> AtomicResult {
Err(RunError::Extern(self.0))
}
fn apply_ref(&self, _: CallData) -> ExternResult<Clause> {
panic!("This atom decays instantly")
}
}
impl<T: ToClause> ToClause for ExternResult<T> {
fn to_clause(self, location: CodeLocation) -> Clause {
match self {
Err(e) => PendingError(e).atom_cls(),
Ok(t) => t.to_clause(location),
}
}
}
struct ListGen<I>(Clonable<I>)
where
I: Iterator + Send,
I::Item: ToClause + Send;
impl<I> Clone for ListGen<I>
where
I: Iterator + Send,
I::Item: ToClause + Send,
{
fn clone(&self) -> Self { Self(self.0.clone()) }
}
impl<I> ToClause for ListGen<I>
where
I: Iterator + Send + 'static,
I::Item: ToClause + Clone + Send,
{
fn to_clause(mut self, location: CodeLocation) -> Clause {
let ctx = nort_gen(location.clone());
match self.0.next() {
None => tpl::C("std::lit::end").template(ctx, []),
Some(val) => {
let atom = Unstable::new(|run| self.to_clause(run.location));
tpl::a2(tpl::C("std::lit::cons"), tpl::Slot, tpl::V(atom))
.template(ctx, [val.to_clause(location)])
},
}
}
}
/// Convert an iterator into a lazy-evaluated Orchid list.
pub fn list<I>(items: I) -> impl ToClause
where
I: IntoIterator + Clone + Send + Sync + 'static,
I::IntoIter: Send,
I::Item: ToClause + Clone + Send,
{
Unstable::new(move |RunData { location, .. }| {
ListGen(Clonable::new(
items.clone().into_iter().map(move |t| t.to_clsi(location.clone())),
))
})
}
impl<T: ToClause + Clone + Send + Sync + 'static> ToClause for Vec<T> {
fn to_clause(self, location: CodeLocation) -> Clause {
list(self).to_clause(location)
}
}
impl ToClause for Atom {
fn to_clause(self, _: CodeLocation) -> Clause { Clause::Atom(self) }
}
mod tuple_impls {
use std::sync::Arc;
use super::ToClause;
use crate::foreign::atom::Atomic;
use crate::foreign::error::AssertionError;
use crate::foreign::implementations::ExternResult;
use crate::foreign::inert::Inert;
use crate::foreign::try_from_expr::TryFromExpr;
use crate::interpreter::nort::{Clause, Expr};
use crate::libs::std::tuple::Tuple;
use crate::location::CodeLocation;
macro_rules! gen_tuple_impl {
( ($($T:ident)*) ($($t:ident)*)) => {
impl<$($T: ToClause),*> ToClause for ($($T,)*) {
fn to_clause(self, location: CodeLocation) -> Clause {
let ($($t,)*) = self;
Inert(Tuple(Arc::new(vec![
$($t.to_expr(location.clone()),)*
]))).atom_cls()
}
}
impl<$($T: TryFromExpr),*> TryFromExpr for ($($T,)*) {
fn from_expr(ex: Expr) -> ExternResult<Self> {
let Inert(Tuple(slice)) = ex.clone().downcast()?;
match &slice[..] {
[$($t),*] => Ok(($($t.clone().downcast()?,)*)),
_ => AssertionError::fail(ex.location(), "Tuple length mismatch", format!("{ex}"))
}
}
}
};
}
gen_tuple_impl!((A)(a));
gen_tuple_impl!((A B) (a b));
gen_tuple_impl!((A B C) (a b c));
gen_tuple_impl!((A B C D) (a b c d));
gen_tuple_impl!((A B C D E) (a b c d e));
gen_tuple_impl!((A B C D E F) (a b c d e f));
gen_tuple_impl!((A B C D E F G) (a b c d e f g));
gen_tuple_impl!((A B C D E F G H) (a b c d e f g h));
gen_tuple_impl!((A B C D E F G H I) (a b c d e f g h i));
gen_tuple_impl!((A B C D E F G H I J) (a b c d e f g h i j));
gen_tuple_impl!((A B C D E F G H I J K) (a b c d e f g h i j k));
gen_tuple_impl!((A B C D E F G H I J K L) (a b c d e f g h i j k l));
}
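A short sketch of what these blanket impls buy on the callback side; the xfn constructor path is assumed from the fn_bridge module earlier in this diff.

// Sketch: Option/Result/Vec/tuple returns now convert automatically.
use crate::foreign::atom::Atomic;
use crate::foreign::fn_bridge::constructors::xfn_1ary;
use crate::foreign::inert::Inert;

// Maps even numbers to std::option::some of half the value and odd numbers to
// std::option::none via the Option impl above; no manual clause construction.
pub fn half_if_even() -> impl Atomic + Clone {
  xfn_1ary(|n: Inert<usize>| (n.0 % 2 == 0).then(|| Inert(n.0 / 2)))
}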

View File

@@ -1,28 +1,30 @@
use std::any::Any; use std::any::Any;
use std::fmt::Debug; use std::fmt::{Debug, Display};
use std::ops::{Deref, DerefMut};
use ordered_float::NotNan; use ordered_float::NotNan;
use super::atom::StrictEq; use super::atom::{Atom, Atomic, AtomicResult, AtomicReturn, NotAFunction};
use super::{AtomicResult, AtomicReturn, XfnResult}; use super::error::{ExternError, ExternResult};
use crate::error::AssertionError; use super::try_from_expr::TryFromExpr;
#[allow(unused)] // for doc use crate::foreign::error::AssertionError;
// use crate::define_fn; use crate::interpreter::apply::CallData;
use crate::foreign::Atomic; use crate::interpreter::nort::{Clause, ClauseInst, Expr};
use crate::interpreted::{Clause, Expr, ExprInst, TryFromExprInst}; use crate::interpreter::run::RunData;
use crate::interpreter::Context; use crate::libs::std::number::Numeric;
use crate::systems::stl::Numeric; use crate::libs::std::string::OrcString;
use crate::utils::ddispatch::{Request, Responder}; use crate::utils::ddispatch::{Request, Responder};
/// A proxy trait that implements [Atomic] for blobs of data in Rust code that /// A proxy trait that implements [Atomic] for blobs of data in Rust code that
/// cannot be processed and always report inert. Since these are expected to be /// cannot be processed and always report inert. Since these are expected to be
/// parameters of functions defined with [define_fn] it also automatically /// parameters of functions defined with [define_fn] it also automatically
/// implements [TryFromExprInst] so that a conversion doesn't have to be /// implements [TryFromExpr] so that a conversion doesn't have to be
/// provided in argument lists. /// provided in argument lists.
pub trait InertAtomic: Debug + Clone + Send + 'static { pub trait InertPayload: Debug + Clone + Send + 'static {
/// Typename to be shown in the error when a conversion from [ExprInst] fails /// Typename to be shown in the error when a conversion from [ExprInst] fails
#[must_use] ///
fn type_str() -> &'static str; /// This will default to `type_name::<Self>()` when it becomes stable
const TYPE_STR: &'static str;
/// Proxies to [Responder] so that you don't have to implement it manually if /// Proxies to [Responder] so that you don't have to implement it manually if
/// you need it, but behaves exactly as the default implementation. /// you need it, but behaves exactly as the default implementation.
#[allow(unused_mut, unused_variables)] // definition should show likely usage #[allow(unused_mut, unused_variables)] // definition should show likely usage
@@ -37,59 +39,99 @@ pub trait InertAtomic: Debug + Clone + Send + 'static {
/// ``` /// ```
fn strict_eq(&self, _: &Self) -> bool { false } fn strict_eq(&self, _: &Self) -> bool { false }
} }
impl<T: InertAtomic> StrictEq for T {
fn strict_eq(&self, other: &dyn Any) -> bool { /// An atom that stores a value and rejects all interpreter interactions. It is
other.downcast_ref().map_or(false, |other| self.strict_eq(other)) /// used to reference foreign data in Orchid.
} #[derive(Debug, Clone)]
pub struct Inert<T: InertPayload>(pub T);
impl<T: InertPayload> Inert<T> {
/// Wrap the argument in a type-erased [Atom] for embedding in Orchid
/// structures.
pub fn atom(t: T) -> Atom { Atom::new(Inert(t)) }
} }
impl<T: InertAtomic> Responder for T {
impl<T: InertPayload> Deref for Inert<T> {
type Target = T;
fn deref(&self) -> &Self::Target { &self.0 }
}
impl<T: InertPayload> DerefMut for Inert<T> {
fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 }
}
impl<T: InertPayload> Responder for Inert<T> {
fn respond(&self, mut request: Request) { fn respond(&self, mut request: Request) {
if request.can_serve::<T>() { if request.can_serve::<T>() {
request.serve(self.clone()) request.serve(self.0.clone())
} else { } else {
self.respond(request) self.0.respond(request)
} }
} }
} }
impl<T: InertAtomic> Atomic for T { impl<T: InertPayload> Atomic for Inert<T> {
fn as_any(self: Box<Self>) -> Box<dyn Any> { self } fn as_any(self: Box<Self>) -> Box<dyn Any> { self }
fn as_any_ref(&self) -> &dyn Any { self } fn as_any_ref(&self) -> &dyn Any { self }
fn run(self: Box<Self>, ctx: Context) -> AtomicResult { fn redirect(&mut self) -> Option<&mut ClauseInst> { None }
Ok(AtomicReturn { gas: ctx.gas, inert: true, clause: self.atom_cls() }) fn run(self: Box<Self>, run: RunData) -> AtomicResult {
AtomicReturn::inert(*self, run.ctx)
}
fn apply_ref(&self, call: CallData) -> ExternResult<Clause> {
Err(NotAFunction(self.clone().atom_expr(call.location)).rc())
}
fn parser_eq(&self, other: &dyn Any) -> bool {
(other.downcast_ref::<Self>())
.map_or(false, |other| self.0.strict_eq(&other.0))
} }
} }
impl<T: InertAtomic> TryFromExprInst for T { impl<T: InertPayload> TryFromExpr for Inert<T> {
fn from_exi(exi: ExprInst) -> XfnResult<Self> { fn from_expr(expr: Expr) -> ExternResult<Self> {
let Expr { clause, location } = exi.expr_val(); let Expr { clause, location } = expr;
match clause { match clause.try_unwrap() {
Clause::Atom(a) => match a.0.as_any().downcast() { Ok(Clause::Atom(at)) => at.try_downcast::<Self>().map_err(|a| {
Ok(t) => Ok(*t), AssertionError::ext(location, T::TYPE_STR, format!("{a:?}"))
Err(_) => AssertionError::fail(location, Self::type_str()), }),
Err(inst) => match &*inst.cls() {
Clause::Atom(at) =>
at.downcast_ref::<Self>().cloned().ok_or_else(|| {
AssertionError::ext(location, T::TYPE_STR, format!("{inst}"))
}),
cls => AssertionError::fail(location, "atom", format!("{cls}")),
}, },
_ => AssertionError::fail(location, "atom"), Ok(cls) => AssertionError::fail(location, "atom", format!("{cls}")),
} }
} }
} }
impl InertAtomic for bool { impl<T: InertPayload + Display> Display for Inert<T> {
fn type_str() -> &'static str { "bool" } fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn strict_eq(&self, other: &Self) -> bool { self == other } write!(f, "{}", self.0)
}
impl InertAtomic for usize {
fn type_str() -> &'static str { "usize" }
fn strict_eq(&self, other: &Self) -> bool { self == other }
fn respond(&self, mut request: Request) {
request.serve(Numeric::Uint(*self))
} }
} }
impl InertAtomic for NotNan<f64> { impl InertPayload for bool {
fn type_str() -> &'static str { "NotNan<f64>" } const TYPE_STR: &'static str = "bool";
fn strict_eq(&self, other: &Self) -> bool { self == other } fn strict_eq(&self, other: &Self) -> bool { self == other }
fn respond(&self, mut request: Request) { fn respond(&self, mut request: Request) {
request.serve(Numeric::Float(*self)) request.serve_with(|| OrcString::from(self.to_string()))
}
}
impl InertPayload for usize {
const TYPE_STR: &'static str = "usize";
fn strict_eq(&self, other: &Self) -> bool { self == other }
fn respond(&self, mut request: Request) {
request.serve(Numeric::Uint(*self));
request.serve_with(|| OrcString::from(self.to_string()))
}
}
impl InertPayload for NotNan<f64> {
const TYPE_STR: &'static str = "NotNan<f64>";
fn strict_eq(&self, other: &Self) -> bool { self == other }
fn respond(&self, mut request: Request) {
request.serve(Numeric::Float(*self));
request.serve_with(|| OrcString::from(self.to_string()))
} }
} }
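A sketch of wrapping custom Rust data with the new Inert/InertPayload split; it assumes TYPE_STR is the only required item, as the defaults shown above suggest.

use crate::foreign::inert::{Inert, InertPayload};

// Any Debug + Clone + Send + 'static payload qualifies.
#[derive(Debug, Clone)]
pub struct FileHandle {
  pub path: String,
}

impl InertPayload for FileHandle {
  const TYPE_STR: &'static str = "FileHandle";
  // strict_eq and respond keep their defaults: the atom never matches in
  // parser patterns and serves no dynamic dispatch requests.
}

// Inert<FileHandle> is now an Atomic that can be embedded in Orchid trees and
// requested back in callbacks through its TryFromExpr impl.
pub fn wrap(path: String) -> Inert<FileHandle> {
  Inert(FileHandle { path })
}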

View File

@@ -2,24 +2,12 @@
//! //!
//! Structures and traits used in the exposure of external functions and values //! Structures and traits used in the exposure of external functions and values
//! to Orchid code //! to Orchid code
mod atom; pub mod atom;
pub mod cps_box; pub mod cps_box;
mod extern_fn; pub mod error;
mod fn_bridge; pub mod fn_bridge;
mod inert; pub mod implementations;
pub mod inert;
use std::sync::Arc; pub mod process;
pub mod to_clause;
pub use atom::{Atom, Atomic, AtomicResult, AtomicReturn, StrictEq}; pub mod try_from_expr;
pub use extern_fn::{ExFn, ExternError, ExternFn};
pub use fn_bridge::constructors::{
xfn_1ary, xfn_2ary, xfn_3ary, xfn_4ary, xfn_5ary, xfn_6ary, xfn_7ary,
xfn_8ary, xfn_9ary,
};
pub use fn_bridge::{Param, ToClause};
pub use inert::InertAtomic;
pub use crate::representations::interpreted::Clause;
/// Return type of the argument to the [xfn_1ary] family of functions
pub type XfnResult<T> = Result<T, Arc<dyn ExternError>>;

39
src/foreign/process.rs Normal file
View File

@@ -0,0 +1,39 @@
use std::fmt::Debug;
use super::atom::{Atomic, AtomicReturn};
use super::error::ExternResult;
use super::to_clause::ToClause;
use crate::interpreter::apply::CallData;
use crate::interpreter::nort::{Clause, ClauseInst};
use crate::interpreter::run::RunData;
use crate::utils::ddispatch::Responder;
/// An atom that immediately decays to the result of the function when
/// normalized. Can be used to build infinite recursive datastructures from
/// Rust.
#[derive(Clone)]
pub struct Unstable<F>(F);
impl<F: FnOnce(RunData) -> R + Send + 'static, R: ToClause> Unstable<F> {
/// Wrap a function in an Unstable
pub const fn new(f: F) -> Self { Self(f) }
}
impl<F> Responder for Unstable<F> {}
impl<F> Debug for Unstable<F> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Unstable").finish_non_exhaustive()
}
}
impl<F: FnOnce(RunData) -> R + Send + 'static, R: ToClause> Atomic
for Unstable<F>
{
fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
fn as_any_ref(&self) -> &dyn std::any::Any { self }
fn apply_ref(&self, _: CallData) -> ExternResult<Clause> {
panic!("This atom decays instantly")
}
fn run(self: Box<Self>, run: RunData) -> super::atom::AtomicResult {
let clause = self.0(run.clone()).to_clause(run.location.clone());
AtomicReturn::run(clause, run)
}
fn redirect(&mut self) -> Option<&mut ClauseInst> { None }
}
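A sketch of the decaying atom in isolation, mirroring how the `list` helper uses it; the constant name std::option::none and all module paths are taken from elsewhere in this diff.

use crate::foreign::process::Unstable;
use crate::foreign::to_clause::ToClause;
use crate::gen::tpl;
use crate::gen::traits::Gen;
use crate::interpreter::gen_nort::nort_gen;
use crate::interpreter::nort::Clause;

// The closure only runs when the interpreter first normalizes the atom, so the
// clause is built lazily at the location where the atom ends up.
pub fn lazy_none() -> impl ToClause {
  Unstable::new(|run| -> Clause {
    tpl::C("std::option::none").template(nort_gen(run.location), [])
  })
}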

43
src/foreign/to_clause.rs Normal file
View File

@@ -0,0 +1,43 @@
use super::atom::Atomic;
use crate::interpreter::nort::{Clause, ClauseInst, Expr};
use crate::location::CodeLocation;
/// A trait for things that are infallibly convertible to [ClauseInst]. These
/// types can be returned by callbacks passed to the [super::xfn_1ary] family of
/// functions.
pub trait ToClause: Sized {
/// Convert this value to a [Clause]. If your value can only be directly
/// converted to a [ClauseInst], you can call `ClauseInst::to_clause` to
/// unwrap it if possible or fall back to [Clause::Identity].
fn to_clause(self, location: CodeLocation) -> Clause;
/// Convert the type to a [Clause].
fn to_clsi(self, location: CodeLocation) -> ClauseInst {
ClauseInst::new(self.to_clause(location))
}
/// Convert to an expression via [ToClause].
fn to_expr(self, location: CodeLocation) -> Expr {
Expr { clause: self.to_clsi(location.clone()), location }
}
}
impl<T: Atomic + Clone> ToClause for T {
fn to_clause(self, _: CodeLocation) -> Clause { self.atom_cls() }
}
impl ToClause for Clause {
fn to_clause(self, _: CodeLocation) -> Clause { self }
}
impl ToClause for ClauseInst {
fn to_clause(self, _: CodeLocation) -> Clause {
self.into_cls()
}
fn to_clsi(self, _: CodeLocation) -> ClauseInst { self }
}
impl ToClause for Expr {
fn to_clause(self, location: CodeLocation) -> Clause {
self.clause.to_clause(location)
}
fn to_clsi(self, _: CodeLocation) -> ClauseInst { self.clause }
fn to_expr(self, _: CodeLocation) -> Expr { self }
}
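A sketch of the three conversion entry points in use; it leans on the Option impl from implementations.rs earlier in this diff and assumes a CodeLocation is supplied by the caller.

use crate::foreign::inert::Inert;
use crate::foreign::to_clause::ToClause;
use crate::interpreter::nort::{Clause, ClauseInst, Expr};
use crate::location::CodeLocation;

// to_clause is the only required method; to_clsi and to_expr are derived
// wrappers that attach the same location.
pub fn conversions(location: CodeLocation) -> (Clause, ClauseInst, Expr) {
  let value = Some(Inert(1usize));
  (
    value.clone().to_clause(location.clone()),
    value.clone().to_clsi(location.clone()),
    value.to_expr(location),
  )
}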

View File

@@ -0,0 +1,28 @@
use super::error::ExternResult;
use crate::interpreter::nort::{ClauseInst, Expr};
use crate::location::CodeLocation;
/// Types automatically convertible from an [Expr]. Most notably, this is how
/// foreign functions request automatic argument downcasting.
pub trait TryFromExpr: Sized {
/// Match and clone the value out of an [Expr]
fn from_expr(expr: Expr) -> ExternResult<Self>;
}
impl TryFromExpr for Expr {
fn from_expr(expr: Expr) -> ExternResult<Self> { Ok(expr) }
}
impl TryFromExpr for ClauseInst {
fn from_expr(expr: Expr) -> ExternResult<Self> { Ok(expr.clause.clone()) }
}
/// Request a value of a particular type and also return its location for
/// further error reporting
#[derive(Debug, Clone)]
pub struct WithLoc<T>(pub CodeLocation, pub T);
impl<T: TryFromExpr> TryFromExpr for WithLoc<T> {
fn from_expr(expr: Expr) -> ExternResult<Self> {
Ok(Self(expr.location(), T::from_expr(expr)?))
}
}
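A sketch of WithLoc in a foreign function, letting the callback attach its error to the argument's own location. The AssertionError::fail signature and the Send-ness of ExternResult are inferred from their uses elsewhere in this diff, not confirmed.

use crate::foreign::atom::Atomic;
use crate::foreign::error::{AssertionError, ExternResult};
use crate::foreign::fn_bridge::constructors::xfn_1ary;
use crate::foreign::inert::Inert;
use crate::foreign::try_from_expr::WithLoc;

// Decrements a positive index; a zero argument is reported at the location of
// the offending expression rather than at the function as a whole.
pub fn prev_index() -> impl Atomic + Clone {
  xfn_1ary(|WithLoc(loc, n): WithLoc<Inert<usize>>| -> ExternResult<Inert<usize>> {
    match n.0 {
      0 => AssertionError::fail(loc, "a nonzero index", format!("{}", n.0)),
      v => Ok(Inert(v - 1)),
    }
  })
}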

7
src/gen/mod.rs Normal file
View File

@@ -0,0 +1,7 @@
//! Abstractions and primitives for defining Orchid code in compile-time Rust
//! constants. This is used both to generate glue code such as function call
//! expressions at runtime and to define completely static intrinsics and
//! constants accessible to usercode.
pub mod tpl;
pub mod traits;
pub mod tree;

80
src/gen/tpl.rs Normal file
View File

@@ -0,0 +1,80 @@
//! Various elemental components to build expression trees that all implement
//! [GenClause].
use std::fmt::Debug;
use super::traits::{GenClause, Generable};
use crate::foreign::atom::{Atom, AtomGenerator, Atomic};
/// Atom, Embed a Rust value. See also [AnyAtom]
#[derive(Debug, Clone)]
pub struct V<A: Atomic + Clone>(pub A);
impl<A: Atomic + Clone> GenClause for V<A> {
fn generate<T: Generable>(&self, ctx: T::Ctx<'_>, _: &impl Fn() -> T) -> T {
T::atom(ctx, Atom::new(self.0.clone()))
}
}
/// Atom, embed a Rust value of unspecified type. See also [V]
#[derive(Debug)]
pub struct AnyAtom(pub AtomGenerator);
impl GenClause for AnyAtom {
fn generate<T: Generable>(&self, ctx: T::Ctx<'_>, _: &impl Fn() -> T) -> T {
T::atom(ctx, self.0.run())
}
}
/// Const, Reference a constant from the execution environment. Unlike Orchid
/// syntax, this doesn't include lambda arguments. For that, use [P]
#[derive(Debug, Clone)]
pub struct C(pub &'static str);
impl GenClause for C {
fn generate<T: Generable>(&self, ctx: T::Ctx<'_>, _: &impl Fn() -> T) -> T {
T::constant(ctx, self.0.split("::"))
}
}
/// Apply a function to a value provided by [L]
#[derive(Debug, Clone)]
pub struct A<F: GenClause, X: GenClause>(pub F, pub X);
impl<F: GenClause, X: GenClause> GenClause for A<F, X> {
fn generate<T: Generable>(&self, ctx: T::Ctx<'_>, p: &impl Fn() -> T) -> T {
T::apply(ctx, |gen| self.0.generate(gen, p), |gen| self.1.generate(gen, p))
}
}
/// Apply a function to two arguments
pub fn a2(
f: impl GenClause,
x: impl GenClause,
y: impl GenClause,
) -> impl GenClause {
A(A(f, x), y)
}
/// Lambda expression. The argument can be referenced with [P]
#[derive(Debug, Clone)]
pub struct L<B: GenClause>(pub &'static str, pub B);
impl<B: GenClause> GenClause for L<B> {
fn generate<T: Generable>(&self, ctx: T::Ctx<'_>, p: &impl Fn() -> T) -> T {
T::lambda(ctx, self.0, |gen| self.1.generate(gen, p))
}
}
/// Parameter to a lambda expression
#[derive(Debug, Clone)]
pub struct P(pub &'static str);
impl GenClause for P {
fn generate<T: Generable>(&self, ctx: T::Ctx<'_>, _: &impl Fn() -> T) -> T {
T::arg(ctx, self.0)
}
}
/// Slot for an Orchid value to be specified during execution
#[derive(Debug, Clone)]
pub struct Slot;
impl GenClause for Slot {
fn generate<T: Generable>(&self, _: T::Ctx<'_>, pop: &impl Fn() -> T) -> T {
pop()
}
}
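A sketch composing these elements; the std constant names are ones that appear elsewhere in this diff, and the templates are only built here, not instantiated.

use crate::gen::tpl;
use crate::gen::traits::GenClause;

// \x. std::option::some x  -- a lambda whose body applies a constant to its argument
pub fn wrap_some() -> impl GenClause + Clone {
  tpl::L("x", tpl::A(tpl::C("std::option::some"), tpl::P("x")))
}

// A cons cell with a hole: the Slot is filled from the parameter list when the
// template is instantiated through Gen::template.
pub fn cons_with_hole() -> impl GenClause {
  tpl::a2(tpl::C("std::lit::cons"), tpl::Slot, tpl::C("std::lit::end"))
}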

74
src/gen/traits.rs Normal file
View File

@@ -0,0 +1,74 @@
//! Abstractions used to generate Orchid expressions
use std::backtrace::Backtrace;
use std::cell::RefCell;
use std::collections::VecDeque;
use std::fmt::Debug;
use crate::foreign::atom::Atom;
/// Representations of the Orchid expression tree that can describe basic
/// language elements.
pub trait Generable: Sized {
/// Context information defined by parents. Generators just forward this.
type Ctx<'a>: Sized;
/// Wrap external data.
fn atom(ctx: Self::Ctx<'_>, a: Atom) -> Self;
/// Generate a reference to a constant
fn constant<'a>(
ctx: Self::Ctx<'_>,
name: impl IntoIterator<Item = &'a str>,
) -> Self;
/// Generate a function call given the function and its argument
fn apply(
ctx: Self::Ctx<'_>,
f: impl FnOnce(Self::Ctx<'_>) -> Self,
x: impl FnOnce(Self::Ctx<'_>) -> Self,
) -> Self;
/// Generate a function. The argument name is only valid within the same
/// [Generable].
fn lambda(
ctx: Self::Ctx<'_>,
name: &str,
body: impl FnOnce(Self::Ctx<'_>) -> Self,
) -> Self;
/// Generate a reference to a function argument. The argument name is only
/// valid within the same [Generable].
fn arg(ctx: Self::Ctx<'_>, name: &str) -> Self;
}
/// Expression templates which can be instantiated in multiple representations
/// of Orchid. Expressions can be built from the elements defined in
/// [super::tpl].
///
/// Do not depend on this trait, use [Gen] instead. Conversely, implement this
/// instead of [Gen].
pub trait GenClause: Debug + Sized {
/// Enact the template at runtime to build a given type.
/// `pop` pops from the runtime template parameter list passed to the
/// generator.
///
/// Do not call this, it's the backing operation of [Gen#template]
fn generate<T: Generable>(&self, ctx: T::Ctx<'_>, pop: &impl Fn() -> T) -> T;
}
/// Expression generators
///
/// Do not implement this trait, it's the frontend for [GenClause]. Conversely,
/// do not consume [GenClause].
pub trait Gen<T: Generable, U>: Debug {
/// Create an instance of this template with some parameters
fn template(&self, ctx: T::Ctx<'_>, params: U) -> T;
}
impl<T: Generable, I: IntoIterator<Item = T>, G: GenClause> Gen<T, I> for G {
fn template(&self, ctx: T::Ctx<'_>, params: I) -> T {
let values = RefCell::new(params.into_iter().collect::<VecDeque<_>>());
let t = self.generate(ctx, &|| {
values.borrow_mut().pop_front().expect("Not enough values provided")
});
let leftover = values.borrow().len();
assert_eq!(leftover, 0, "Too many values provided ({leftover} left) {}", Backtrace::force_capture());
t
}
}

81
src/gen/tree.rs Normal file
View File

@@ -0,0 +1,81 @@
//! Components to build in-memory module trees in Orchid. These modules
//! can only contain constants and other modules.
use std::fmt::Debug;
use dyn_clone::{clone_box, DynClone};
use trait_set::trait_set;
use super::tpl;
use super::traits::{Gen, GenClause};
use crate::foreign::atom::Atomic;
use crate::interpreter::gen_nort::nort_gen;
use crate::interpreter::nort::Expr;
use crate::location::CodeLocation;
use crate::tree::{ModEntry, ModMember, TreeConflict};
use crate::utils::combine::Combine;
trait_set! {
trait TreeLeaf = Gen<Expr, [Expr; 0]> + DynClone;
}
/// A leaf in the [ConstTree]
#[derive(Debug)]
pub struct GenConst(Box<dyn TreeLeaf>);
impl GenConst {
fn new(data: impl GenClause + Clone + 'static) -> Self {
Self(Box::new(data))
}
/// Instantiate template as [crate::interpreter::nort]
pub fn gen_nort(&self, location: CodeLocation) -> Expr {
self.0.template(nort_gen(location), [])
}
}
impl Clone for GenConst {
fn clone(&self) -> Self { Self(clone_box(&*self.0)) }
}
/// Error condition when constant trees that define the same constant are
/// merged. Produced during system loading if multiple modules define the
/// same constant
#[derive(Debug, Clone)]
pub struct ConflictingConsts;
impl Combine for GenConst {
type Error = ConflictingConsts;
fn combine(self, _: Self) -> Result<Self, Self::Error> {
Err(ConflictingConsts)
}
}
/// A lightweight module tree that can be built declaratively by hand to
/// describe libraries of external functions in Rust. It implements [Add] for
/// added convenience
pub type ConstTree = ModEntry<GenConst, (), ()>;
/// Describe a constant
#[must_use]
pub fn leaf(value: impl GenClause + Clone + 'static) -> ConstTree {
ModEntry { x: (), member: ModMember::Item(GenConst::new(value)) }
}
/// Describe an [Atomic]
#[must_use]
pub fn atom_leaf(atom: impl Atomic + Clone + 'static) -> ConstTree {
leaf(tpl::V(atom))
}
/// Describe an [Atomic] which appears as an entry in a [ConstTree#tree]
///
/// The unarray is used to trick rustfmt into breaking the atom into a block
/// without breaking this call into a block
#[must_use]
pub fn atom_ent<K: AsRef<str>>(
key: K,
[atom]: [impl Atomic + Clone + 'static; 1],
) -> (K, ConstTree) {
(key, atom_leaf(atom))
}
/// Errors produced during the merger of constant trees
pub type ConstCombineErr = TreeConflict<GenConst, (), ()>;
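A sketch of declaring constants with these helpers; the Inert atom and the tpl constant come from earlier files in this diff.

use crate::foreign::inert::Inert;
use crate::gen::tpl;
use crate::gen::tree::{atom_ent, atom_leaf, leaf, ConstTree};

// A constant that simply aliases another constant by name.
pub fn none_alias() -> ConstTree {
  leaf(tpl::C("std::option::none"))
}

// A constant holding an inert Rust value, plus the same thing as a keyed entry
// ready to be listed in a module.
pub fn answer_entry() -> (&'static str, ConstTree) {
  let _standalone: ConstTree = atom_leaf(Inert(42usize));
  atom_ent("answer", [Inert(42usize)])
}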

View File

@@ -0,0 +1,142 @@
//! Convert the preprocessed AST into IR
use std::collections::VecDeque;
use std::rc::Rc;
use substack::Substack;
use super::ir;
use crate::error::{ProjectError, ProjectResult};
use crate::location::{CodeLocation, SourceRange};
use crate::name::Sym;
use crate::parse::parsed;
use crate::utils::unwrap_or::unwrap_or;
trait IRErrorKind: Clone + Send + Sync + 'static {
const DESCR: &'static str;
}
#[derive(Clone)]
struct IRError<T: IRErrorKind>(SourceRange, Sym, T);
impl<T: IRErrorKind> ProjectError for IRError<T> {
const DESCRIPTION: &'static str = T::DESCR;
fn message(&self) -> String { format!("In {}, {}", self.1, T::DESCR) }
fn one_position(&self) -> CodeLocation {
CodeLocation::Source(self.0.clone())
}
}
#[derive(Clone)]
struct EmptyS;
impl IRErrorKind for EmptyS {
const DESCR: &'static str =
"`()` as a clause is meaningless in lambda calculus";
}
#[derive(Clone)]
struct BadGroup;
impl IRErrorKind for BadGroup {
const DESCR: &'static str = "Only `(...)` may be used after macros. \
`[...]` and `{...}` left in the code are signs of incomplete macro execution";
}
#[derive(Clone)]
struct InvalidArg;
impl IRErrorKind for InvalidArg {
const DESCR: &'static str = "Argument names can only be Name nodes";
}
#[derive(Clone)]
struct PhLeak;
impl IRErrorKind for PhLeak {
const DESCR: &'static str = "Placeholders shouldn't even appear \
in the code during macro execution; this is likely a compiler bug";
}
/// Try to convert an expression from AST format to typed lambda
pub fn ast_to_ir(expr: parsed::Expr, symbol: Sym) -> ProjectResult<ir::Expr> {
expr_rec(expr, Context::new(symbol))
}
#[derive(Clone)]
struct Context<'a> {
names: Substack<'a, Sym>,
symbol: Sym,
}
impl<'a> Context<'a> {
#[must_use]
fn w_name<'b>(&'b self, name: Sym) -> Context<'b>
where 'a: 'b {
Context { names: self.names.push(name), symbol: self.symbol.clone() }
}
}
impl Context<'static> {
#[must_use]
fn new(symbol: Sym) -> Self { Self { names: Substack::Bottom, symbol } }
}
/// Process an expression sequence
fn exprv_rec(
mut v: VecDeque<parsed::Expr>,
ctx: Context<'_>,
location: SourceRange,
) -> ProjectResult<ir::Expr> {
let last = unwrap_or! {v.pop_back(); {
return Err(IRError(location, ctx.symbol, EmptyS).pack());
}};
let v_end = match v.back() {
None => return expr_rec(last, ctx),
Some(penultimate) => penultimate.range.range.end,
};
let f = exprv_rec(v, ctx.clone(), location.map_range(|r| r.start..v_end))?;
let x = expr_rec(last, ctx)?;
let value = ir::Clause::Apply(Rc::new(f), Rc::new(x));
Ok(ir::Expr { value, location: CodeLocation::Source(location) })
}
/// Process an expression
fn expr_rec(
parsed::Expr { value, range }: parsed::Expr,
ctx: Context,
) -> ProjectResult<ir::Expr> {
match value {
parsed::Clause::S(parsed::PType::Par, body) => {
return exprv_rec(body.to_vec().into(), ctx, range);
},
parsed::Clause::S(..) =>
return Err(IRError(range, ctx.symbol, BadGroup).pack()),
_ => (),
}
let value = match value {
parsed::Clause::Atom(a) => ir::Clause::Atom(a.clone()),
parsed::Clause::Lambda(arg, b) => {
let name = match &arg[..] {
[parsed::Expr { value: parsed::Clause::Name(name), .. }] => name,
[parsed::Expr { value: parsed::Clause::Placeh { .. }, .. }] =>
return Err(IRError(range.clone(), ctx.symbol, PhLeak).pack()),
_ => return Err(IRError(range.clone(), ctx.symbol, InvalidArg).pack()),
};
let body_ctx = ctx.w_name(name.clone());
let body = exprv_rec(b.to_vec().into(), body_ctx, range.clone())?;
ir::Clause::Lambda(Rc::new(body))
},
parsed::Clause::Name(name) => {
let lvl_opt = (ctx.names.iter())
.enumerate()
.find(|(_, n)| **n == name)
.map(|(lvl, _)| lvl);
match lvl_opt {
Some(lvl) => ir::Clause::LambdaArg(lvl),
None => ir::Clause::Constant(name.clone()),
}
},
parsed::Clause::S(parsed::PType::Par, entries) =>
exprv_rec(entries.to_vec().into(), ctx, range.clone())?.value,
parsed::Clause::S(..) =>
return Err(IRError(range, ctx.symbol, BadGroup).pack()),
parsed::Clause::Placeh { .. } =>
return Err(IRError(range, ctx.symbol, PhLeak).pack()),
};
Ok(ir::Expr::new(value, range.clone()))
}
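A standalone sketch of the binder-resolution rule in expr_rec above: each lambda pushes its argument name, lookup walks from the innermost binder outward, and the index of the match becomes the level stored in LambdaArg (assuming Substack iterates innermost-first).

// Self-contained illustration; it does not use the crate's Substack type.
fn resolve(binders_innermost_first: &[&str], name: &str) -> Option<usize> {
  binders_innermost_first.iter().position(|n| *n == name)
}

fn main() {
  // In  \x. \y. x  the body's `x` is looked up against ["y", "x"]:
  assert_eq!(resolve(&["y", "x"], "x"), Some(1)); // becomes LambdaArg(1)
  assert_eq!(resolve(&["y", "x"], "z"), None); // becomes Constant("z")
}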

View File

@@ -1,10 +1,15 @@
//! IR is an abstract representation of Orchid expressions that's impractical
//! for all purposes except converting to and from other representations. Future
//! innovations in the processing and execution of code will likely operate on
//! this representation.
use std::fmt::{Debug, Write}; use std::fmt::{Debug, Write};
use std::rc::Rc; use std::rc::Rc;
use super::location::Location; use crate::foreign::atom::AtomGenerator;
use crate::foreign::{Atom, ExFn}; use crate::location::{CodeLocation, SourceRange};
use crate::utils::string_from_charset; use crate::name::Sym;
use crate::Sym; use crate::utils::string_from_charset::string_from_charset;
/// Indicates whether either side needs to be wrapped. Syntax whose end is /// Indicates whether either side needs to be wrapped. Syntax whose end is
/// ambiguous on that side must use parentheses, or forward the flag /// ambiguous on that side must use parentheses, or forward the flag
@@ -14,10 +19,14 @@ struct Wrap(bool, bool);
#[derive(Clone)] #[derive(Clone)]
pub struct Expr { pub struct Expr {
pub value: Clause, pub value: Clause,
pub location: Location, pub location: CodeLocation,
} }
impl Expr { impl Expr {
pub fn new(value: Clause, location: SourceRange) -> Self {
Self { value, location: CodeLocation::Source(location) }
}
fn deep_fmt( fn deep_fmt(
&self, &self,
f: &mut std::fmt::Formatter<'_>, f: &mut std::fmt::Formatter<'_>,
@@ -42,10 +51,8 @@ pub enum Clause {
Lambda(Rc<Expr>), Lambda(Rc<Expr>),
Constant(Sym), Constant(Sym),
LambdaArg(usize), LambdaArg(usize),
/// An opaque function, eg. an effectful function employing CPS
ExternFn(ExFn),
/// An opaque non-callable value, eg. a file handle /// An opaque non-callable value, eg. a file handle
Atom(Atom), Atom(AtomGenerator),
} }
const ARGNAME_CHARSET: &str = "abcdefghijklmnopqrstuvwxyz"; const ARGNAME_CHARSET: &str = "abcdefghijklmnopqrstuvwxyz";
@@ -79,7 +86,6 @@ impl Clause {
) -> std::fmt::Result { ) -> std::fmt::Result {
match self { match self {
Self::Atom(a) => write!(f, "{a:?}"), Self::Atom(a) => write!(f, "{a:?}"),
Self::ExternFn(fun) => write!(f, "{fun:?}"),
Self::Lambda(body) => parametric_fmt(f, depth, "\\", body, wr), Self::Lambda(body) => parametric_fmt(f, depth, "\\", body, wr),
Self::LambdaArg(skip) => { Self::LambdaArg(skip) => {
let lambda_depth = (depth - skip - 1).try_into().unwrap(); let lambda_depth = (depth - skip - 1).try_into().unwrap();

View File

@@ -0,0 +1,30 @@
//! Convert IR to the interpreter's NORT representation
use super::ir;
use crate::interpreter::nort;
use crate::interpreter::nort_builder::NortBuilder;
fn expr(expr: &ir::Expr, ctx: NortBuilder<(), usize>) -> nort::Expr {
clause(&expr.value, ctx).to_expr(expr.location.clone())
}
fn clause(cls: &ir::Clause, ctx: NortBuilder<(), usize>) -> nort::Clause {
match cls {
ir::Clause::Constant(name) => nort::Clause::Constant(name.clone()),
ir::Clause::Atom(a) => nort::Clause::Atom(a.run()),
ir::Clause::LambdaArg(n) => {
ctx.arg_logic(n);
nort::Clause::LambdaArg
},
ir::Clause::Apply(f, x) => ctx.apply_logic(|c| expr(f, c), |c| expr(x, c)),
ir::Clause::Lambda(body) => ctx.lambda_logic(&(), |c| expr(body, c)),
}
}
pub fn ir_to_nort(expr: &ir::Expr) -> nort::Expr {
let c = NortBuilder::new(&|count| {
let mut count: usize = *count;
Box::new(move |()| count.checked_sub(1).map(|v| count = v).is_none())
});
nort::ClauseInst::new(clause(&expr.value, c)).to_expr(expr.location.clone())
}

3
src/intermediate/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub(crate) mod ast_to_ir;
pub(crate) mod ir;
pub(crate) mod ir_to_nort;

View File

@@ -1,11 +0,0 @@
//! A type-agnostic interner
//!
//! Can be used to deduplicate various structures for fast equality comparisons.
//! The parser uses it to intern strings.
mod monotype;
mod multitype;
mod token;
pub use monotype::TypedInterner;
pub use multitype::Interner;
pub use token::Tok;

View File

@@ -1,55 +0,0 @@
use std::borrow::Borrow;
use std::hash::{BuildHasher, Hash};
use std::sync::{Arc, RwLock};
use hashbrown::HashMap;
use super::token::Tok;
/// An interner for any type that implements [Borrow]. This is inspired by
/// Lasso but much simpler, in part because not much can be known about the
/// type.
pub struct TypedInterner<T: 'static + Eq + Hash + Clone> {
tokens: RwLock<HashMap<Arc<T>, Tok<T>>>,
}
impl<T: Eq + Hash + Clone> TypedInterner<T> {
/// Create a fresh interner instance
#[must_use]
pub fn new() -> Arc<Self> {
Arc::new(Self { tokens: RwLock::new(HashMap::new()) })
}
/// Intern an object, returning a token
#[must_use]
pub fn i<Q: ?Sized + Eq + Hash + ToOwned<Owned = T>>(
self: &Arc<Self>,
q: &Q,
) -> Tok<T>
where
T: Borrow<Q>,
{
let mut tokens = self.tokens.write().unwrap();
let hash = compute_hash(tokens.hasher(), q);
let raw_entry = tokens
.raw_entry_mut()
.from_hash(hash, |k| <T as Borrow<Q>>::borrow(k) == q);
let kv = raw_entry.or_insert_with(|| {
let keyrc = Arc::new(q.to_owned());
let token = Tok::<T>::new(keyrc.clone(), Arc::downgrade(self));
(keyrc, token)
});
kv.1.clone()
}
}
/// Helper function to compute hashes outside a hashmap
#[must_use]
fn compute_hash(
hash_builder: &impl BuildHasher,
key: &(impl Hash + ?Sized),
) -> u64 {
use core::hash::Hasher;
let mut state = hash_builder.build_hasher();
key.hash(&mut state);
state.finish()
}

View File

@@ -1,88 +0,0 @@
use std::any::{Any, TypeId};
use std::borrow::Borrow;
use std::cell::{RefCell, RefMut};
use std::hash::Hash;
use std::sync::Arc;
use hashbrown::HashMap;
use super::monotype::TypedInterner;
use super::token::Tok;
/// A collection of interners based on their type. Allows to intern any object
/// that implements [ToOwned]. Objects of the same type are stored together in a
/// [TypedInterner].
pub struct Interner {
interners: RefCell<HashMap<TypeId, Arc<dyn Any + Send + Sync>>>,
}
impl Interner {
/// Create a new interner
#[must_use]
pub fn new() -> Self { Self { interners: RefCell::new(HashMap::new()) } }
/// Intern something
#[must_use]
pub fn i<Q: ?Sized + Eq + Hash + ToOwned>(&self, q: &Q) -> Tok<Q::Owned>
where
Q::Owned: 'static + Eq + Hash + Clone + Borrow<Q> + Send + Sync,
{
let mut interners = self.interners.borrow_mut();
let interner = get_interner(&mut interners);
interner.i(q)
}
/// Fully resolve a list of interned things.
#[must_use]
pub fn extern_all<T: 'static + Eq + Hash + Clone>(s: &[Tok<T>]) -> Vec<T> {
s.iter().map(|t| (**t).clone()).collect()
}
}
impl Default for Interner {
fn default() -> Self { Self::new() }
}
/// Get or create an interner for a given type.
#[must_use]
fn get_interner<T: 'static + Eq + Hash + Clone + Send + Sync>(
interners: &mut RefMut<HashMap<TypeId, Arc<dyn Any + Send + Sync>>>,
) -> Arc<TypedInterner<T>> {
let boxed = interners
.raw_entry_mut()
.from_key(&TypeId::of::<T>())
.or_insert_with(|| (TypeId::of::<T>(), TypedInterner::<T>::new()))
.1
.clone();
boxed.downcast().expect("the typeid is supposed to protect from this")
}
#[cfg(test)]
mod test {
use super::*;
#[test]
pub fn test_string() {
let interner = Interner::new();
let key1 = interner.i("foo");
let key2 = interner.i(&"foo".to_string());
assert_eq!(key1, key2)
}
#[test]
pub fn test_slice() {
let interner = Interner::new();
let key1 = interner.i(&vec![1, 2, 3]);
let key2 = interner.i(&[1, 2, 3][..]);
assert_eq!(key1, key2);
}
// #[test]
#[allow(unused)]
pub fn test_str_slice() {
let interner = Interner::new();
let key1 =
interner.i(&vec!["a".to_string(), "b".to_string(), "c".to_string()]);
let key2 = interner.i(&["a", "b", "c"][..]);
// assert_eq!(key1, key2);
}
}

View File

@@ -1,93 +0,0 @@
use std::cmp::PartialEq;
use std::fmt::{Debug, Display};
use std::hash::Hash;
use std::num::NonZeroUsize;
use std::ops::Deref;
use std::sync::{Arc, Weak};
use super::TypedInterner;
/// A number representing an object of type `T` stored in some interner. It is a
/// logic error to compare tokens obtained from different interners, or to use a
/// token with an interner other than the one that created it, but this is
/// currently not enforced.
#[derive(Clone)]
pub struct Tok<T: Eq + Hash + Clone + 'static> {
data: Arc<T>,
interner: Weak<TypedInterner<T>>,
}
impl<T: Eq + Hash + Clone + 'static> Tok<T> {
/// Create a new token. Used exclusively by the interner
#[must_use]
pub(crate) fn new(data: Arc<T>, interner: Weak<TypedInterner<T>>) -> Self {
Self { data, interner }
}
/// Take the ID number out of a token
#[must_use]
pub fn id(&self) -> NonZeroUsize {
((self.data.as_ref() as *const T as usize).try_into())
.expect("Pointer can always be cast to nonzero")
}
/// Cast into usize
#[must_use]
pub fn usize(&self) -> usize { self.id().into() }
/// Panics if the two tokens were not created by the same interner
pub fn assert_comparable(&self, other: &Self) {
let iref = self.interner.as_ptr() as usize;
assert!(
iref == other.interner.as_ptr() as usize,
"Tokens must come from the same interner"
);
}
}
impl<T: Eq + Hash + Clone + 'static> Tok<Vec<Tok<T>>> {
/// Extern all elements of the vector in a new vector
pub fn extern_vec(&self) -> Vec<T> {
self.iter().map(|t| (**t).clone()).collect()
}
}
impl<T: Eq + Hash + Clone + 'static> Deref for Tok<T> {
type Target = T;
fn deref(&self) -> &Self::Target { self.data.as_ref() }
}
impl<T: Eq + Hash + Clone + 'static + Debug> Debug for Tok<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Token({} -> {:?})", self.id(), self.data.as_ref())
}
}
impl<T: Eq + Hash + Clone + Display + 'static> Display for Tok<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", **self)
}
}
impl<T: Eq + Hash + Clone + 'static> Eq for Tok<T> {}
impl<T: Eq + Hash + Clone + 'static> PartialEq for Tok<T> {
fn eq(&self, other: &Self) -> bool {
self.assert_comparable(other);
self.id() == other.id()
}
}
impl<T: Eq + Hash + Clone + 'static> Ord for Tok<T> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.assert_comparable(other);
self.id().cmp(&other.id())
}
}
impl<T: Eq + Hash + Clone + 'static> PartialOrd for Tok<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl<T: Eq + Hash + Clone + 'static> Hash for Tok<T> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write_usize(self.usize())
}
}

View File

@@ -1,113 +1,188 @@
use super::context::Context; use std::collections::VecDeque;
use super::error::RuntimeError; use std::mem;
use super::Return;
use crate::foreign::AtomicReturn; use never::Never;
use crate::representations::interpreted::{Clause, ExprInst};
use crate::representations::PathSet; use super::context::RunContext;
use crate::utils::never::{unwrap_always, Always}; use super::error::RunError;
use crate::utils::Side; use super::nort::{Clause, ClauseInst, Expr};
use super::path_set::{PathSet, Step};
use super::run::run;
use crate::location::CodeLocation;
/// Information about a function call presented to an external function
pub struct CallData<'a> {
/// Location of the function expression
pub location: CodeLocation,
/// The argument the function was called on. Functions are curried
pub arg: Expr,
/// Information relating to this interpreter run
pub ctx: RunContext<'a>,
}
/// Process the clause at the end of the provided path. Note that paths always /// Process the clause at the end of the provided path. Note that paths always
/// point to at least one target. Note also that this is not cached as a /// point to at least one target. Note also that this is not cached as a
/// normalization step in the intermediate expressions. /// normalization step in the intermediate expressions.
fn map_at<E>( fn map_at<E>(
path: &[Side], mut path: impl Iterator<Item = Step>,
source: ExprInst, source: &Clause,
mapper: &mut impl FnMut(Clause) -> Result<Clause, E>, mapper: &mut impl FnMut(&Clause) -> Result<Clause, E>,
) -> Result<ExprInst, E> { ) -> Result<Clause, E> {
source // Pass through some unambiguous wrapper clauses
.try_update(|value, _loc| { match source {
// Pass right through lambdas Clause::Identity(alt) => return map_at(path, &alt.cls(), mapper),
if let Clause::Lambda { args, body } = value { Clause::Lambda { args, body: Expr { location: b_loc, clause } } =>
return Ok(( return Ok(Clause::Lambda {
Clause::Lambda { args, body: map_at(path, body, mapper)? }, args: args.clone(),
(), body: Expr {
)); clause: map_at(path, &clause.cls(), mapper)?.to_inst(),
} location: b_loc.clone(),
// If the path ends here, process the next (non-lambda) node },
let (head, tail) = if let Some(sf) = path.split_first() { }),
sf _ => (),
} else { }
return Ok((mapper(value)?, ())); Ok(match (source, path.next()) {
(Clause::Lambda { .. } | Clause::Identity(_), _) =>
unreachable!("Handled above"),
// If the path ends and this isn't a lambda, process it
(val, None) => mapper(val)?,
// If it's an Apply, execute the next step in the path
(Clause::Apply { f, x }, Some(head)) => {
let proc = |x: &Expr| {
Ok(map_at(path, &x.clause.cls(), mapper)?.to_expr(x.location()))
}; };
// If it's an Apply, execute the next step in the path match head {
if let Clause::Apply { f, x } = value { None => Clause::Apply { f: proc(f)?, x: x.clone() },
return Ok(( Some(n) => {
match head { let i = x.len() - n - 1;
Side::Left => Clause::Apply { f: map_at(tail, f, mapper)?, x }, let mut argv = x.clone();
Side::Right => Clause::Apply { f, x: map_at(tail, x, mapper)? }, argv[i] = proc(&x[i])?;
}, Clause::Apply { f: f.clone(), x: argv }
(), },
));
} }
panic!("Invalid path") },
}) (_, Some(_)) => panic!("Path leads into node that isn't Apply or Lambda"),
.map(|p| p.0) })
} }
/// Replace the [Clause::LambdaArg] placeholders at the ends of the [PathSet] /// Replace the [Clause::LambdaArg] placeholders at the ends of the [PathSet]
/// with the value in the body. Note that a path may point to multiple /// with the value in the body. Note that a path may point to multiple
/// placeholders. /// placeholders.
#[must_use] #[must_use]
fn substitute(paths: &PathSet, value: Clause, body: ExprInst) -> ExprInst { fn substitute(paths: &PathSet, value: ClauseInst, body: &Clause) -> Clause {
let PathSet { steps, next } = paths; let PathSet { steps, next } = paths;
unwrap_always(map_at(steps, body, &mut |checkpoint| -> Always<Clause> { map_at(steps.iter().cloned(), body, &mut |chkpt| -> Result<Clause, Never> {
match (checkpoint, next) { match (chkpt, next) {
(Clause::Lambda { .. }, _) => unreachable!("Handled by map_at"), (Clause::Lambda { .. } | Clause::Identity(_), _) => {
(Clause::Apply { f, x }, Some((left, right))) => Ok(Clause::Apply { unreachable!("Handled by map_at")
f: substitute(left, value.clone(), f),
x: substitute(right, value.clone(), x),
}),
(Clause::LambdaArg, None) => Ok(value.clone()),
(_, None) => {
panic!("Substitution path ends in something other than LambdaArg")
},
(_, Some(_)) => {
panic!("Substitution path leads into something other than Apply")
}, },
(Clause::Apply { f, x }, Some(conts)) => {
let mut argv = x.clone();
let f = match conts.get(&None) {
None => f.clone(),
Some(sp) => substitute(sp, value.clone(), &f.clause.cls())
.to_expr(f.location()),
};
for (i, old) in argv.iter_mut().rev().enumerate() {
if let Some(sp) = conts.get(&Some(i)) {
let tmp = substitute(sp, value.clone(), &old.clause.cls());
*old = tmp.to_expr(old.location());
}
}
Ok(Clause::Apply { f, x: argv })
},
(Clause::LambdaArg, None) => Ok(Clause::Identity(value.clone())),
(_, None) => panic!("Argument path must point to LambdaArg"),
(_, Some(_)) => panic!("Argument path can only fork at Apply"),
} }
})) })
.unwrap_or_else(|e| match e {})
}
pub(super) fn apply_as_atom(
f: Expr,
arg: Expr,
ctx: RunContext,
) -> Result<Clause, RunError> {
let call = CallData { location: f.location(), arg, ctx };
match f.clause.try_unwrap() {
Ok(clause) => match clause {
Clause::Atom(atom) => Ok(atom.apply(call)?),
_ => panic!("Not an atom"),
},
Err(clsi) => match &*clsi.cls() {
Clause::Atom(atom) => Ok(atom.apply_ref(call)?),
_ => panic!("Not an atom"),
},
}
} }
/// Apply a function-like expression to a parameter. /// Apply a function-like expression to a parameter.
pub fn apply( pub(super) fn apply(
f: ExprInst, mut f: Expr,
x: ExprInst, mut argv: VecDeque<Expr>,
ctx: Context, mut ctx: RunContext,
) -> Result<Return, RuntimeError> { ) -> Result<(Option<usize>, Clause), RunError> {
let (state, (gas, inert)) = f.try_update(|clause, loc| match clause { // allow looping but break on the main path so that `continue` functions as a
// apply an ExternFn or an internal function // trampoline
Clause::ExternFn(f) => { loop {
let clause = f.apply(x, ctx.clone()).map_err(RuntimeError::Extern)?; if argv.is_empty() {
Ok((clause, (ctx.gas.map(|g| g - 1), false))) return Ok((ctx.gas, f.clause.into_cls()));
}, } else if ctx.gas == Some(0) {
Clause::Lambda { args, body } => Ok(if let Some(args) = args { return Ok((Some(0), Clause::Apply { f, x: argv }));
let x_cls = x.expr_val().clause; }
let result = substitute(&args, x_cls, body); let mut f_cls = f.clause.cls_mut();
// cost of substitution match &mut *f_cls {
// XXX: should this be the number of occurrences instead? // apply an ExternFn or an internal function
(result.expr_val().clause, (ctx.gas.map(|x| x - 1), false)) Clause::Atom(_) => {
} else { mem::drop(f_cls);
(body.expr_val().clause, (ctx.gas, false)) // take a step in expanding atom
}), let halt = run(f, ctx.clone())?;
Clause::Constant(name) => ctx.gas = halt.gas;
if let Some(sym) = ctx.symbols.get(&name) { if halt.inert && halt.state.clause.is_atom() {
Ok((Clause::Apply { f: sym.clone(), x }, (ctx.gas, false))) let arg = argv.pop_front().expect("checked above");
} else { let loc = halt.state.location();
Err(RuntimeError::MissingSymbol(name.clone(), loc)) f = apply_as_atom(halt.state, arg, ctx.clone())?.to_expr(loc)
} else {
f = halt.state
}
}, },
Clause::Atom(atom) => { Clause::Lambda { args, body } => {
// take a step in expanding atom match args {
let AtomicReturn { clause, gas, inert } = atom.run(ctx.clone())?; None => *f_cls = body.clause.clone().into_cls(),
Ok((Clause::Apply { f: clause.wrap(), x }, (gas, inert))) Some(args) => {
}, let arg = argv.pop_front().expect("checked above").clause.clone();
Clause::Apply { f: fun, x: arg } => { let cls = substitute(args, arg, &body.clause.cls());
// take a step in resolving pre-function // cost of substitution
let ret = apply(fun, arg, ctx.clone())?; // XXX: should this be the number of occurrences instead?
let Return { state, inert, gas } = ret; ctx.use_gas(1);
Ok((Clause::Apply { f: state, x }, (gas, inert))) mem::drop(f_cls);
}, f = cls.to_expr(f.location());
_ => Err(RuntimeError::NonFunctionApplication(loc)), },
})?; }
Ok(Return { state, gas, inert }) },
Clause::Constant(name) => {
let name = name.clone();
mem::drop(f_cls);
f = (ctx.symbols.get(&name).cloned())
.ok_or_else(|| RunError::MissingSymbol(name, f.location()))?;
ctx.use_gas(1);
},
Clause::Apply { f: fun, x } => {
for item in x.drain(..).rev() {
argv.push_front(item)
}
let tmp = fun.clone();
mem::drop(f_cls);
f = tmp;
},
Clause::Identity(f2) => {
let tmp = f2.clone();
mem::drop(f_cls);
f.clause = tmp
},
Clause::Bottom(bottom) => return Err(bottom.clone()),
Clause::LambdaArg => panic!("Leftover argument marker"),
}
}
} }
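A standalone sketch of the control shape used by apply above: rather than recursing into the function position, the loop rewrites a small head state and continues, so long application spines don't grow the Rust call stack.

// Self-contained illustration of the trampoline; the real loop additionally
// tracks gas, an argument queue and Identity forwarding.
enum Head {
  Done(i64),
  Add(i64, i64),
  Alias(Box<Head>), // stands in for Clause::Identity redirection
}

fn reduce(mut head: Head) -> i64 {
  loop {
    head = match head {
      Head::Done(v) => return v,
      Head::Add(a, b) => Head::Done(a + b),
      Head::Alias(inner) => *inner,
    };
  }
}

fn main() {
  let expr = Head::Alias(Box::new(Head::Add(2, 3)));
  assert_eq!(reduce(expr), 5);
}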

View File

@@ -1,32 +1,38 @@
use hashbrown::HashMap; use hashbrown::HashMap;
use crate::interner::Interner; use super::nort::Expr;
use crate::representations::interpreted::ExprInst; use crate::name::Sym;
use crate::Sym;
/// All the data associated with an interpreter run /// All the data associated with an interpreter run
#[derive(Clone)] #[derive(Clone)]
pub struct Context<'a> { pub struct RunContext<'a> {
/// Table used to resolve constants /// Table used to resolve constants
pub symbols: &'a HashMap<Sym, ExprInst>, pub symbols: &'a HashMap<Sym, Expr>,
/// The interner used for strings internally, so external functions can
/// deduce referenced constant names on the fly
pub interner: &'a Interner,
/// The number of reduction steps the interpreter can take before returning /// The number of reduction steps the interpreter can take before returning
pub gas: Option<usize>, pub gas: Option<usize>,
} }
impl<'a> RunContext<'a> {
/// Consume some gas if it is being counted
pub fn use_gas(&mut self, amount: usize) {
if let Some(g) = self.gas.as_mut() {
*g = g.saturating_sub(amount)
}
}
/// Gas is being counted and there is none left
pub fn no_gas(&self) -> bool { self.gas == Some(0) }
}
/// All the data produced by an interpreter run /// All the data produced by an interpreter run
#[derive(Clone)] #[derive(Clone)]
pub struct Return { pub struct Halt {
/// The new expression tree /// The new expression tree
pub state: ExprInst, pub state: Expr,
/// Leftover [Context::gas] if counted /// Leftover [Context::gas] if counted
pub gas: Option<usize>, pub gas: Option<usize>,
/// If true, the next run would not modify the expression /// If true, the next run would not modify the expression
pub inert: bool, pub inert: bool,
} }
impl Return { impl Halt {
/// Check if gas has run out. Returns false if gas is not being used /// Check if gas has run out. Returns false if gas is not being used
pub fn preempted(&self) -> bool { self.gas.map_or(false, |g| g == 0) } pub fn preempted(&self) -> bool { self.gas.map_or(false, |g| g == 0) }
/// Returns a general report of the return /// Returns a general report of the return

View File
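A standalone sketch of the preemption contract these fields describe: a metered step function either finishes or hands back leftover work with the remaining budget, so the embedder can top the gas up and resume.

// Self-contained illustration; the real interpreter returns Halt with an Expr
// in place of the i64 state used here.
enum Outcome {
  Inert(i64),
  Preempted { state: i64, gas_left: usize },
}

fn run(mut state: i64, mut gas: usize) -> Outcome {
  while state > 0 {
    if gas == 0 {
      return Outcome::Preempted { state, gas_left: gas };
    }
    state -= 1; // one reduction step
    gas -= 1;
  }
  Outcome::Inert(state)
}

fn main() {
  match run(10, 4) {
    Outcome::Preempted { state, .. } => assert_eq!(state, 6),
    Outcome::Inert(_) => unreachable!("budget was too small to finish"),
  }
}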

@@ -1,37 +1,33 @@
use std::fmt::{Debug, Display}; use std::fmt::{Debug, Display};
use std::sync::Arc; use std::sync::Arc;
use crate::foreign::ExternError; use crate::foreign::error::ExternError;
use crate::{Location, Sym}; use crate::location::CodeLocation;
use crate::name::Sym;
use super::run::Interrupted;
/// Problems in the process of execution /// Problems in the process of execution
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum RuntimeError { pub enum RunError {
/// A Rust function encountered an error /// A Rust function encountered an error
Extern(Arc<dyn ExternError>), Extern(Arc<dyn ExternError>),
/// Primitive applied as function
NonFunctionApplication(Location),
/// Symbol not in context /// Symbol not in context
MissingSymbol(Sym, Location), MissingSymbol(Sym, CodeLocation),
/// Ran out of gas
Interrupted(Interrupted)
} }
impl From<Arc<dyn ExternError>> for RuntimeError { impl From<Arc<dyn ExternError>> for RunError {
fn from(value: Arc<dyn ExternError>) -> Self { Self::Extern(value) } fn from(value: Arc<dyn ExternError>) -> Self { Self::Extern(value) }
} }
impl Display for RuntimeError { impl Display for RunError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self { match self {
Self::Extern(e) => write!(f, "Error in external function: {e}"), Self::Extern(e) => write!(f, "Error in external function: {e}"),
Self::NonFunctionApplication(location) => {
write!(f, "Primitive applied as function at {}", location)
},
Self::MissingSymbol(sym, loc) => { Self::MissingSymbol(sym, loc) => {
write!( write!(f, "{sym}, called at {loc} is not loaded")
f,
"{}, called at {loc} is not loaded",
sym.extern_vec().join("::")
)
}, },
} }
} }

126
src/interpreter/gen_nort.rs Normal file
View File

@@ -0,0 +1,126 @@
//! Implementations of [Generable] for [super::nort]
use intern_all::i;
use super::nort_builder::NortBuilder;
use crate::foreign::atom::Atom;
use crate::foreign::to_clause::ToClause;
use crate::gen::traits::Generable;
use crate::interpreter::nort::{Clause, ClauseInst, Expr};
use crate::location::CodeLocation;
use crate::name::Sym;
/// Context data for instantiating templated expressions as [super::nort].
/// Instances of this type are created via [nort_gen]
pub type NortGenCtx<'a> = (CodeLocation, NortBuilder<'a, str, str>);
/// Create [NortGenCtx] instances to generate interpreted expressions
pub fn nort_gen<'a>(location: CodeLocation) -> NortGenCtx<'a> {
(location, NortBuilder::new(&|l| Box::new(move |r| l == r)))
}
impl Generable for Expr {
type Ctx<'a> = NortGenCtx<'a>;
fn apply(
ctx: Self::Ctx<'_>,
f_cb: impl FnOnce(Self::Ctx<'_>) -> Self,
x_cb: impl FnOnce(Self::Ctx<'_>) -> Self,
) -> Self {
(ctx
.1
.apply_logic(|c| f_cb((ctx.0.clone(), c)), |c| x_cb((ctx.0.clone(), c))))
.to_expr(ctx.0.clone())
}
fn arg(ctx: Self::Ctx<'_>, name: &str) -> Self {
Clause::arg(ctx.clone(), name).to_expr(ctx.0.clone())
}
fn atom(ctx: Self::Ctx<'_>, a: Atom) -> Self {
Clause::atom(ctx.clone(), a).to_expr(ctx.0.clone())
}
fn constant<'a>(
ctx: Self::Ctx<'_>,
name: impl IntoIterator<Item = &'a str>,
) -> Self {
Clause::constant(ctx.clone(), name).to_expr(ctx.0.clone())
}
fn lambda(
ctx: Self::Ctx<'_>,
name: &str,
body: impl FnOnce(Self::Ctx<'_>) -> Self,
) -> Self {
(ctx.1.lambda_logic(name, |c| body((ctx.0.clone(), c))))
.to_expr(ctx.0.clone())
}
}
impl Generable for ClauseInst {
type Ctx<'a> = NortGenCtx<'a>;
fn arg(ctx: Self::Ctx<'_>, name: &str) -> Self {
Clause::arg(ctx, name).to_inst()
}
fn atom(ctx: Self::Ctx<'_>, a: Atom) -> Self {
Clause::atom(ctx, a).to_inst()
}
fn constant<'a>(
ctx: Self::Ctx<'_>,
name: impl IntoIterator<Item = &'a str>,
) -> Self {
Clause::constant(ctx, name).to_inst()
}
fn lambda(
ctx: Self::Ctx<'_>,
name: &str,
body: impl FnOnce(Self::Ctx<'_>) -> Self,
) -> Self {
(ctx
.1
.lambda_logic(name, |c| body((ctx.0.clone(), c)).to_expr(ctx.0.clone())))
.to_clsi(ctx.0.clone())
}
fn apply(
ctx: Self::Ctx<'_>,
f: impl FnOnce(Self::Ctx<'_>) -> Self,
x: impl FnOnce(Self::Ctx<'_>) -> Self,
) -> Self {
(ctx.1.apply_logic(
|c| f((ctx.0.clone(), c)).to_expr(ctx.0.clone()),
|c| x((ctx.0.clone(), c)).to_expr(ctx.0.clone()),
))
.to_clsi(ctx.0.clone())
}
}
impl Generable for Clause {
type Ctx<'a> = NortGenCtx<'a>;
fn atom(_: Self::Ctx<'_>, a: Atom) -> Self { Clause::Atom(a) }
fn constant<'a>(
_: Self::Ctx<'_>,
name: impl IntoIterator<Item = &'a str>,
) -> Self {
let sym = Sym::new(name.into_iter().map(i)).expect("Empty constant");
Clause::Constant(sym)
}
fn apply(
ctx: Self::Ctx<'_>,
f: impl FnOnce(Self::Ctx<'_>) -> Self,
x: impl FnOnce(Self::Ctx<'_>) -> Self,
) -> Self {
ctx.1.apply_logic(
|c| f((ctx.0.clone(), c)).to_expr(ctx.0.clone()),
|c| x((ctx.0.clone(), c)).to_expr(ctx.0.clone()),
)
}
fn arg(ctx: Self::Ctx<'_>, name: &str) -> Self {
ctx.1.arg_logic(name);
Clause::LambdaArg
}
fn lambda(
ctx: Self::Ctx<'_>,
name: &str,
body: impl FnOnce(Self::Ctx<'_>) -> Self,
) -> Self {
ctx
.1
.lambda_logic(name, |c| body((ctx.0.clone(), c)).to_expr(ctx.0.clone()))
}
}


@@ -1,16 +1,19 @@
use std::any::{Any, TypeId}; use std::any::{Any, TypeId};
use std::sync::Arc;
use hashbrown::HashMap; use hashbrown::HashMap;
use trait_set::trait_set; use trait_set::trait_set;
use super::{run, Context, Return, RuntimeError}; use super::context::{Halt, RunContext};
use crate::foreign::{Atom, Atomic, ExternError}; use super::error::RunError;
use crate::interpreted::{Clause, Expr, ExprInst}; use super::nort::{Clause, Expr};
use crate::utils::take_with_output; use super::run::run;
use crate::foreign::atom::{Atom, Atomic};
use crate::foreign::error::ExternResult;
use crate::foreign::to_clause::ToClause;
use crate::location::CodeLocation;
trait_set! { trait_set! {
trait Handler = FnMut(Box<dyn Any>) -> HandlerRes; trait Handler = for<'a> FnMut(&'a dyn Any, CodeLocation) -> Expr;
} }
/// A table of command handlers /// A table of command handlers
@@ -23,26 +26,37 @@ impl<'a> HandlerTable<'a> {
#[must_use] #[must_use]
pub fn new() -> Self { Self { handlers: HashMap::new() } } pub fn new() -> Self { Self { handlers: HashMap::new() } }
/// Add a handler function to interpret a type of atom and decide what happens /// Add a handler function to interpret a command and select the continuation.
/// next. This function can be impure. /// See [HandlerTable#with] for a declarative option.
pub fn register<T: 'static>( pub fn register<T: 'static, R: ToClause>(
&mut self, &mut self,
mut f: impl FnMut(Box<T>) -> HandlerRes + 'a, mut f: impl for<'b> FnMut(&'b T) -> R + 'a,
) { ) {
let cb = move |a: Box<dyn Any>| f(a.downcast().expect("found by TypeId")); let cb = move |a: &dyn Any, loc: CodeLocation| {
f(a.downcast_ref().expect("found by TypeId")).to_expr(loc)
};
let prev = self.handlers.insert(TypeId::of::<T>(), Box::new(cb)); let prev = self.handlers.insert(TypeId::of::<T>(), Box::new(cb));
assert!(prev.is_none(), "A handler for this type is already registered"); assert!(prev.is_none(), "A handler for this type is already registered");
} }
/// Add a handler function to interpret a command and select the continuation.
/// See [HandlerTable#register] for a procedural option.
pub fn with<T: 'static>(
mut self,
f: impl FnMut(&T) -> ExternResult<Expr> + 'a,
) -> Self {
self.register(f);
self
}
/// Find and execute the corresponding handler for this type /// Find and execute the corresponding handler for this type
pub fn dispatch( pub fn dispatch(
&mut self, &mut self,
arg: Box<dyn Atomic>, arg: &dyn Atomic,
) -> Result<HandlerRes, Box<dyn Atomic>> { loc: CodeLocation,
match self.handlers.get_mut(&arg.as_any_ref().type_id()) { ) -> Option<Expr> {
Some(f) => Ok(f(arg.as_any())), (self.handlers.get_mut(&arg.as_any_ref().type_id()))
None => Err(arg), .map(|f| f(arg.as_any_ref(), loc))
}
} }
/// Combine two non-overlapping handler sets /// Combine two non-overlapping handler sets
@@ -56,33 +70,27 @@ impl<'a> HandlerTable<'a> {
} }
} }
/// Various possible outcomes of a [Handler] execution. Ok returns control to
/// the interpreter. The meaning of Err is decided by the value in it.
pub type HandlerRes = Result<ExprInst, Arc<dyn ExternError>>;
/// [run] orchid code, executing any commands it returns using the specified /// [run] orchid code, executing any commands it returns using the specified
/// [Handler]s. /// [Handler]s.
pub fn run_handler( pub fn run_handler(
mut expr: ExprInst, mut state: Expr,
handlers: &mut HandlerTable, handlers: &mut HandlerTable,
mut ctx: Context, RunContext { mut gas, symbols }: RunContext,
) -> Result<Return, RuntimeError> { ) -> Result<Halt, RunError> {
loop { loop {
let mut ret = run(expr, ctx.clone())?; let inert;
let quit = take_with_output(&mut ret.state, |exi| match exi.expr_val() { Halt { gas, inert, state } = run(state, RunContext { gas, symbols })?;
Expr { clause: Clause::Atom(a), .. } => match handlers.dispatch(a.0) { let state_cls = state.clause.cls();
Err(b) => (Clause::Atom(Atom(b)).wrap(), Ok(true)), if let Clause::Atom(Atom(a)) = &*state_cls {
Ok(e) => match e { if let Some(res) = handlers.dispatch(a.as_ref(), state.location()) {
Ok(expr) => (expr, Ok(false)), drop(state_cls);
Err(e) => (Clause::Bottom.wrap(), Err(e)), state = res;
}, continue;
}, }
expr => (ExprInst::new(expr), Ok(true)), }
})?; if inert || gas == Some(0) {
if quit | ret.gas.map_or(false, |g| g == 0) { drop(state_cls);
return Ok(ret); break Ok(Halt { gas, inert, state });
} }
ctx.gas = ret.gas;
expr = ret.state;
} }
} }
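// A sketch of typical use, mirroring the CancelTimer handler registered by the
// asynch system later in this commit: each command type gets one handler that
// unpacks the CPS atom, performs its effect, and returns the continuation to
// keep running. `MyCmd`, `perform`, `program` and `symbols` are placeholders
// for caller-provided items, not part of this module.
//
//   let mut handlers = HandlerTable::new();
//   handlers.register(|cmd: &CPSBox<MyCmd>| {
//     let (payload, continuation) = cmd.unpack1();
//     perform(payload);
//     continuation
//   });
//   let halt = run_handler(program, &mut handlers, RunContext { gas: None, symbols })?;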


@@ -1,11 +1,10 @@
//! functions to interact with Orchid code //! functions to interact with Orchid code
mod apply; pub mod apply;
mod context; pub mod context;
mod error; pub mod error;
mod handler; pub mod gen_nort;
mod run; pub mod handler;
pub mod nort_builder;
pub use context::{Context, Return, ReturnStatus}; pub mod nort;
pub use error::RuntimeError; pub(crate) mod path_set;
pub use handler::{run_handler, HandlerRes, HandlerTable}; pub mod run;
pub use run::run;

src/interpreter/nort.rs (new file, 334 lines)

@@ -0,0 +1,334 @@
//! The NORT (Normal Order Referencing Tree) is the interpreter's runtime
//! representation of Orchid programs.
//!
//! It uses a locator tree to find bound variables in lambda functions, which
//! necessitates a normal reduction order because modifying the body by reducing
//! expressions would invalidate any locators in enclosing lambdas.
//!
//! Clauses are held in a mutable `Arc<Mutex<_>>`, so that after substitution
//! the instances of the argument remain linked and a reduction step applied to
//! any instance transforms all of them.
//!
//! To improve locality and make the tree less deep and locators shorter,
//! function calls store multiple arguments in a deque.
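// A standalone illustration of the sharing property described above: clones of
// an `Arc<Mutex<_>>` alias one cell, so a reduction applied through any handle
// is observed by every other copy in the tree. The string stands in for a
// clause; nothing here is part of the real representation.
#[allow(dead_code)]
fn shared_clause_sketch() {
  use std::sync::{Arc, Mutex};
  let shared = Arc::new(Mutex::new(String::from("3 + 4")));
  let alias = shared.clone(); // a second occurrence of the same subexpression
  *shared.lock().unwrap() = String::from("7"); // one normalization step
  assert_eq!(alias.lock().unwrap().as_str(), "7"); // every copy sees the result
}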
use std::collections::VecDeque;
use std::fmt::{Debug, Display};
use std::ops::{Deref, DerefMut};
use std::sync::{Arc, Mutex, TryLockError};
use itertools::Itertools;
use super::error::RunError;
use super::path_set::PathSet;
use crate::foreign::atom::Atom;
#[allow(unused)] // for doc
use crate::foreign::atom::Atomic;
use crate::foreign::error::ExternResult;
use crate::foreign::try_from_expr::TryFromExpr;
use crate::location::CodeLocation;
use crate::name::Sym;
#[allow(unused)] // for doc
use crate::parse::parsed;
use crate::utils::ddispatch::request;
use crate::utils::take_with_output::take_with_output;
/// Kinda like [AsMut] except it supports a guard
pub(crate) trait AsDerefMut<T> {
fn as_deref_mut(&mut self) -> impl DerefMut<Target = T> + '_;
}
/// An expression with metadata
#[derive(Clone)]
pub struct Expr {
/// The actual value
pub clause: ClauseInst,
/// Information about the code that produced this value
pub location: CodeLocation,
}
impl Expr {
/// Constructor
pub fn new(clause: ClauseInst, location: CodeLocation) -> Self {
Self { clause, location }
}
/// Obtain the location of the expression
pub fn location(&self) -> CodeLocation { self.location.clone() }
/// Convert into any type that implements [TryFromExpr]. Calls to this
/// function are generated wherever a conversion is elided in an extern
/// function.
pub fn downcast<T: TryFromExpr>(self) -> ExternResult<T> {
let Expr { mut clause, location } = self;
loop {
let cls_deref = clause.cls();
match &*cls_deref {
Clause::Identity(alt) => {
let temp = alt.clone();
drop(cls_deref);
clause = temp;
},
_ => {
drop(cls_deref);
return T::from_expr(Expr { clause, location });
},
};
}
}
/// Visit all expressions in the tree. The search can be exited early by
/// returning [Some]
///
/// See also [parsed::Expr::search_all]
pub fn search_all<T>(
&self,
predicate: &mut impl FnMut(&Self) -> Option<T>,
) -> Option<T> {
if let Some(t) = predicate(self) {
return Some(t);
}
self.clause.inspect(|c| match c {
Clause::Identity(_alt) => unreachable!("Handled by inspect"),
Clause::Apply { f, x } => (f.search_all(predicate))
.or_else(|| x.iter().find_map(|x| x.search_all(predicate))),
Clause::Lambda { body, .. } => body.search_all(predicate),
Clause::Constant(_)
| Clause::LambdaArg
| Clause::Atom(_)
| Clause::Bottom(_) => None,
})
}
}
impl Debug for Expr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}@{}", self.clause, self.location)
}
}
impl Display for Expr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.clause)
}
}
impl AsDerefMut<Clause> for Expr {
fn as_deref_mut(&mut self) -> impl DerefMut<Target = Clause> + '_ {
self.clause.cls_mut()
}
}
/// [ExprInst::with_literal] produces this marker unit to indicate that the
/// expression is not a literal
pub struct NotALiteral;
/// A wrapper around expressions to handle their multiple occurrences in
/// the tree together
#[derive(Clone)]
pub struct ClauseInst(pub Arc<Mutex<Clause>>);
impl ClauseInst {
/// Wrap a [Clause] in a shared container so that normalization steps are
/// applied to all references
#[must_use]
pub fn new(cls: Clause) -> Self { Self(Arc::new(Mutex::new(cls))) }
/// Take the [Clause] out of this container if it's the last reference to it,
/// or return self.
pub fn try_unwrap(self) -> Result<Clause, ClauseInst> {
Arc::try_unwrap(self.0).map(|c| c.into_inner().unwrap()).map_err(Self)
}
/// Read-only access to the shared clause instance
///
/// # Panics
///
/// if the clause is already borrowed in read-write mode
#[must_use]
pub fn cls(&self) -> impl Deref<Target = Clause> + '_ {
self.0.lock().unwrap()
}
/// Read-Write access to the shared clause instance
///
/// # Panics
///
/// if the clause is already borrowed
#[must_use]
pub fn cls_mut(&self) -> impl DerefMut<Target = Clause> + '_ {
self.0.lock().unwrap()
}
/// Call a normalization function on the expression. The expr is
/// updated with the new clause which affects all copies of it
/// across the tree.
pub fn try_normalize<T>(
&self,
mapper: impl FnOnce(Clause) -> Result<(Clause, T), RunError>,
) -> Result<(ClauseInst, T), RunError> {
enum Report<T> {
Nested(ClauseInst, T),
Plain(T),
}
let ret = take_with_output(&mut *self.cls_mut(), |clause| match &clause {
Clause::Identity(alt) => match alt.try_normalize(mapper) {
Ok((nested, t)) => (clause, Ok(Report::Nested(nested, t))),
Err(e) => (Clause::Bottom(e.clone()), Err(e)),
},
_ => match mapper(clause) {
Err(e) => (Clause::Bottom(e.clone()), Err(e)),
Ok((clause, t)) => (clause, Ok(Report::Plain(t))),
},
})?;
Ok(match ret {
Report::Nested(nested, t) => (nested, t),
Report::Plain(t) => (self.clone(), t),
})
}
/// Call a predicate on the clause, returning whatever the
/// predicate returns. This is a convenience function for reaching
/// through the [Mutex]. The clause will never be [Clause::Identity].
#[must_use]
pub fn inspect<T>(&self, predicate: impl FnOnce(&Clause) -> T) -> T {
match &*self.cls() {
Clause::Identity(sub) => sub.inspect(predicate),
x => predicate(x),
}
}
/// If this expression is an [Atomic], request an object of the given type.
/// If it's not an atomic, fail the request automatically.
#[must_use = "your request might not have succeeded"]
pub fn request<T: 'static>(&self) -> Option<T> {
match &*self.cls() {
Clause::Atom(a) => request(&*a.0),
Clause::Identity(alt) => alt.request(),
_ => None,
}
}
/// Associate a location with this clause
pub fn to_expr(self, location: CodeLocation) -> Expr {
Expr { clause: self.clone(), location: location.clone() }
}
/// Check ahead-of-time if this clause contains an atom. Calls
/// [ClauseInst#cls] for read access.
///
/// Since atoms cannot become normalizable, if this is true and previous
/// normalization failed, the atom is known to be in normal form.
pub fn is_atom(&self) -> bool { matches!(&*self.cls(), Clause::Atom(_)) }
/// Tries to unwrap the [Arc]. If that fails, clones it field by field.
/// If it's a [Clause::Atom] which cannot be cloned, wraps it in a
/// [Clause::Identity].
///
/// Implementation of [crate::foreign::to_clause::ToClause::to_clause]. The
/// trait is more general so it requires a location which this one doesn't.
pub fn into_cls(self) -> Clause {
self.try_unwrap().unwrap_or_else(|clsi| match &*clsi.cls() {
Clause::Apply { f, x } => Clause::Apply { f: f.clone(), x: x.clone() },
Clause::Atom(_) => Clause::Identity(clsi.clone()),
Clause::Bottom(e) => Clause::Bottom(e.clone()),
Clause::Constant(c) => Clause::Constant(c.clone()),
Clause::Identity(sub) => Clause::Identity(sub.clone()),
Clause::Lambda { args, body } =>
Clause::Lambda { args: args.clone(), body: body.clone() },
Clause::LambdaArg => Clause::LambdaArg,
})
}
}
impl Debug for ClauseInst {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.0.try_lock() {
Ok(expr) => write!(f, "{expr:?}"),
Err(TryLockError::Poisoned(_)) => write!(f, "<poisoned>"),
Err(TryLockError::WouldBlock) => write!(f, "<locked>"),
}
}
}
impl Display for ClauseInst {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.0.try_lock() {
Ok(expr) => write!(f, "{expr}"),
Err(TryLockError::Poisoned(_)) => write!(f, "<poisoned>"),
Err(TryLockError::WouldBlock) => write!(f, "<locked>"),
}
}
}
impl AsDerefMut<Clause> for ClauseInst {
fn as_deref_mut(&mut self) -> impl DerefMut<Target = Clause> + '_ {
self.cls_mut()
}
}
/// Distinct types of expressions recognized by the interpreter
#[derive(Debug)]
pub enum Clause {
/// An expression that causes an error
Bottom(RunError),
/// Indicates that this [ClauseInst] has the same value as the other
/// [ClauseInst]. This has two benefits:
///
/// - [Clause] and therefore [Atomic] doesn't have to be [Clone] which saves
/// many synchronization primitives and reference counters in usercode
/// - it enforces on the type level that all copies are normalized together,
/// so accidental inefficiency in the interpreter is rarer.
///
/// That being said, it's still arbitrarily many indirections, so when possible
/// APIs should be usable with a [ClauseInst] directly.
Identity(ClauseInst),
/// An opaque non-callable value, eg. a file handle
Atom(Atom),
/// A function application
Apply {
/// Function to be applied
f: Expr,
/// Argument to be substituted in the function
x: VecDeque<Expr>,
},
/// A name to be looked up in the interpreter's symbol table
Constant(Sym),
/// A function
Lambda {
/// A collection of (zero or more) paths to placeholders belonging to this
/// function
args: Option<PathSet>,
/// The tree produced by this function, with placeholders where the
/// argument will go
body: Expr,
},
/// A placeholder within a function that will be replaced upon application
LambdaArg,
}
impl Clause {
/// Wrap a clause in a refcounted lock
pub fn to_inst(self) -> ClauseInst { ClauseInst::new(self) }
/// Wrap a clause in an expression.
pub fn to_expr(self, location: CodeLocation) -> Expr {
self.to_inst().to_expr(location)
}
}
impl Display for Clause {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Clause::Atom(a) => write!(f, "{a:?}"),
Clause::Bottom(err) => write!(f, "bottom({err})"),
Clause::LambdaArg => write!(f, "arg"),
Clause::Apply { f: fun, x } =>
write!(f, "({fun} {})", x.iter().join(" ")),
Clause::Lambda { args, body } => match args {
Some(path) => write!(f, "\\{path:?}.{body}"),
None => write!(f, "\\_.{body}"),
},
Clause::Constant(t) => write!(f, "{t}"),
Clause::Identity(other) => write!(f, "({other})"),
}
}
}
impl AsDerefMut<Clause> for Clause {
fn as_deref_mut(&mut self) -> impl DerefMut<Target = Clause> + '_ { self }
}


@@ -0,0 +1,149 @@
use std::cell::RefCell;
use std::mem;
use substack::Substack;
use super::nort::{AsDerefMut, Clause, Expr};
use super::path_set::PathSet;
use crate::utils::pure_seq::pushed;
enum IntGenData<'a, T: ?Sized> {
Lambda(&'a T, &'a RefCell<Option<PathSet>>),
/// Counts left steps within a chain of [Clause::Apply] for collapsing.
Apply(&'a RefCell<usize>),
/// Replaces [IntGenData::Apply] when stepping left into non-apply to record
/// a [None] [super::path_set::Step].
AppF,
/// Replaces [IntGenData::Apply] when stepping right to freeze the value.
AppArg(usize),
}
impl<'a, T: ?Sized> Copy for IntGenData<'a, T> {}
impl<'a, T: ?Sized> Clone for IntGenData<'a, T> {
fn clone(&self) -> Self { *self }
}
struct ArgCollector(RefCell<Option<PathSet>>);
impl ArgCollector {
pub fn new() -> Self { Self(RefCell::new(None)) }
pub fn into_path(self) -> Option<PathSet> { self.0.into_inner() }
}
/// Strategy used to find the lambda corresponding to a given argument in the
/// stack. The function is called on the data associated with the argument, then
/// the callback it returns is called on every lambda ancestor's associated
/// data from closest to outermost ancestor. The first lambda where this
/// callback returns true is considered to own the argument.
pub type LambdaPicker<'a, T, U> =
&'a dyn for<'b> Fn(&'b U) -> Box<dyn FnMut(&T) -> bool + 'b>;
/// Bundle of information passed down through recursive functions to instantiate
/// runtime [Expr], [super::nort::ClauseInst] or [Clause].
///
/// The context used by [crate::gen::traits::Gen] to convert templates, which
/// includes this type, is constructed with [super::gen_nort::nort_gen].
pub struct NortBuilder<'a, T: ?Sized, U: ?Sized> {
stack: Substack<'a, IntGenData<'a, T>>,
lambda_picker: LambdaPicker<'a, T, U>,
}
impl<'a, T: ?Sized, U: ?Sized> NortBuilder<'a, T, U> {
/// Create a new recursive [super::nort] builder
pub fn new(lambda_picker: LambdaPicker<'a, T, U>) -> Self {
Self { stack: Substack::Bottom, lambda_picker }
}
/// [Substack::pop] and copy the rest of the builder
fn pop<'b>(&'b self, count: usize) -> NortBuilder<'b, T, U>
where 'a: 'b {
let mut new = *self;
new.stack = *self.stack.pop(count);
new
}
/// [Substack::push] and copy the rest of the builder
fn push<'b>(&'b self, data: IntGenData<'a, T>) -> NortBuilder<'b, T, U>
where 'a: 'b {
let mut new = *self;
new.stack = self.stack.push(data);
new
}
fn non_app_step<V>(self, f: impl FnOnce(NortBuilder<T, U>) -> V) -> V {
if let Some(IntGenData::Apply(_)) = self.stack.value() {
let prev = self.pop(1);
f(prev.push(IntGenData::AppF))
} else {
f(self)
}
}
/// Climb back through the stack and find a lambda associated with this
/// argument, then record the path taken from the lambda to this argument in
/// the lambda's mutable cell.
pub fn arg_logic(self, name: &'a U) {
let mut lambda_chk = (self.lambda_picker)(name);
self.non_app_step(|ctx| {
let opt = ctx.stack.rfold(None, |path, item| match item {
IntGenData::Apply(_) => panic!("This is removed after handling"),
IntGenData::Lambda(n, rc) =>
lambda_chk(n).then(|| (vec![], *rc)).or(path),
IntGenData::AppArg(n) => path.map(|(p, rc)| (pushed(p, Some(*n)), rc)),
IntGenData::AppF => path.map(|(p, rc)| (pushed(p, None), rc)),
});
let (path, slot) = opt.expect("Argument not wrapped in matching lambda");
match &mut *slot.borrow_mut() {
slot @ None => *slot = Some(PathSet::end(path)),
Some(slot) => take_mut::take(slot, |p| p.overlay(PathSet::end(path))),
}
})
}
/// Push a stackframe corresponding to a lambda expression, build the body,
/// then record the path set collected by [NortBuilder::arg_logic] calls
/// within the body.
pub fn lambda_logic(
self,
name: &T,
body: impl FnOnce(NortBuilder<T, U>) -> Expr,
) -> Clause {
let coll = ArgCollector::new();
let frame = IntGenData::Lambda(name, &coll.0);
let body = self.non_app_step(|ctx| body(ctx.push(frame)));
let args = coll.into_path();
Clause::Lambda { args, body }
}
/// Logic for collapsing Apply clauses. Different steps of the logic
/// communicate via mutable variables on the stack
pub fn apply_logic(
self,
f: impl FnOnce(NortBuilder<T, U>) -> Expr,
x: impl FnOnce(NortBuilder<T, U>) -> Expr,
) -> Clause {
let mut fun: Expr;
let arg: Expr;
if let Some(IntGenData::Apply(rc)) = self.stack.value() {
// argument side commits backidx
arg = x(self.pop(1).push(IntGenData::AppArg(*rc.borrow())));
// function side increments backidx
*rc.borrow_mut() += 1;
fun = f(self);
} else {
// function side starts from backidx 1
fun = f(self.push(IntGenData::Apply(&RefCell::new(1))));
// argument side commits 0
arg = x(self.push(IntGenData::AppArg(0)));
};
let mut cls_lk = fun.as_deref_mut();
if let Clause::Apply { x, f: _ } = &mut *cls_lk {
x.push_back(arg);
mem::drop(cls_lk);
fun.clause.into_cls()
} else {
mem::drop(cls_lk);
Clause::Apply { f: fun, x: [arg].into() }
}
}
}
impl<'a, T: ?Sized, U: ?Sized> Copy for NortBuilder<'a, T, U> {}
impl<'a, T: ?Sized, U: ?Sized> Clone for NortBuilder<'a, T, U> {
fn clone(&self) -> Self { *self }
}

src/interpreter/path_set.rs (new file, 165 lines)

@@ -0,0 +1,165 @@
use std::collections::VecDeque;
use std::fmt;
use hashbrown::HashMap;
use itertools::Itertools;
use crate::utils::join::join_maps;
/// A step into a [super::nort::Clause::Apply]. If [None], it steps to the
/// function. If [Some(n)], it steps to the `n`th _last_ argument.
pub type Step = Option<usize>;
fn print_step(step: Step) -> String {
if let Some(n) = step { format!("{n}>") } else { "f>".to_string() }
}
/// A branching path selecting some placeholders (but at least one) in a Lambda
/// expression
#[derive(Clone)]
pub struct PathSet {
/// The single steps through [super::nort::Clause::Apply]
pub steps: VecDeque<Step>,
/// If Some, it splits at a [super::nort::Clause::Apply]. If None, it ends in
/// a [super::nort::Clause::LambdaArg]
pub next: Option<HashMap<Step, PathSet>>,
}
impl PathSet {
/// Create a path set for more than one target
pub fn branch(
steps: impl IntoIterator<Item = Step>,
conts: impl IntoIterator<Item = (Step, Self)>,
) -> Self {
let conts = conts.into_iter().collect::<HashMap<_, _>>();
assert!(1 < conts.len(), "Branching pathsets need multiple continuations");
Self { steps: steps.into_iter().collect(), next: Some(conts) }
}
/// Create a path set for one target
pub fn end(steps: impl IntoIterator<Item = Step>) -> Self {
Self { steps: steps.into_iter().collect(), next: None }
}
/// Create a path set that points to a slot that is a direct
/// child of the given lambda with no applications. In essence, this means
/// that this argument will be picked as the value of the expression after any
/// number of subsequent discarded parameters.
pub fn pick() -> Self { Self { steps: VecDeque::new(), next: None } }
/// Merge two paths into one path that points to all targets of both. Only
/// works if both paths select leaf nodes of the same partial tree.
///
/// # Panics
///
/// if either path selects a node the other path dissects
pub fn overlay(self, other: Self) -> Self {
let (mut short, mut long) = match self.steps.len() < other.steps.len() {
true => (self, other),
false => (other, self),
};
let short_len = short.steps.len();
let long_len = long.steps.len();
let match_len = (short.steps.iter())
.zip(long.steps.iter())
.take_while(|(a, b)| a == b)
.count();
// fact: match_len <= short_len <= long_len
if short_len == match_len && match_len == long_len {
// implies match_len == short_len == long_len
match (short.next, long.next) {
(None, None) => Self::end(short.steps.iter().cloned()),
(Some(_), None) | (None, Some(_)) => {
panic!("One of these paths is faulty")
},
(Some(s), Some(l)) => Self::branch(
short.steps.iter().cloned(),
join_maps(s, l, |_, l, r| l.overlay(r)),
),
}
} else if short_len == match_len {
// implies match_len == short_len < long_len
// long.steps[0..match_len] is in steps
// long.steps[match_len] becomes the choice of branch below
// long.steps[match_len + 1..] is in tail
let mut conts = short.next.expect("One path ends inside the other");
let tail_steps = long.steps.split_off(match_len + 1);
let tail = match long.next {
Some(n) => Self::branch(tail_steps, n),
None => Self::end(tail_steps),
};
let branch = long.steps[match_len];
let prev_c = conts.remove(&branch);
let new_c = if let Some(x) = prev_c { x.overlay(tail) } else { tail };
conts.insert(branch, new_c);
Self::branch(short.steps, conts)
} else {
// implies match_len < short_len <= long_len
// steps[0..match_len] is in shared
// steps[match_len] become the branches below
// steps[match_len + 1..] is in new_long and new_short
let new_short_steps = short.steps.split_off(match_len + 1);
let short_last = short.steps.pop_back().expect("split at n + 1");
let new_short = Self { next: short.next.clone(), steps: new_short_steps };
let new_long_steps = long.steps.split_off(match_len + 1);
let new_long = Self { next: long.next.clone(), steps: new_long_steps };
Self::branch(short.steps, [
(short_last, new_short),
(long.steps[match_len], new_long),
])
}
}
/// Prepend a step to a path. If it had previously started at a node that is
/// at the specified step within an Apply clause, it now starts at the Apply.
///
/// This is only valid if the new Apply is **separate** from the previous
/// root.
pub fn prepend(&mut self, step: Step) { self.steps.push_front(step); }
}
impl fmt::Display for PathSet {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let step_s = self.steps.iter().copied().map(print_step).join("");
match &self.next {
Some(conts) => {
let opts =
conts.iter().map(|(h, t)| format!("{}{t}", print_step(*h))).join("|");
write!(f, "{step_s}({opts})")
},
None => write!(f, "{step_s}x"),
}
}
}
impl fmt::Debug for PathSet {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "PathSet({self})")
}
}
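// How to read the Display notation produced above and used by the tests below
// (a summary of the impl, not extra behaviour):
//   f>    step to the function side of an Apply
//   N>    step to the N'th last argument of an Apply
//   x     a lambda argument, i.e. a leaf of the path set
//   (a|b) a branch where several placeholders diverge
// so, per the Display impl, `PathSet::end([None, Some(2)])` prints as "f>2>x".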
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_combine() {
let ps1 = PathSet { next: None, steps: VecDeque::from([Some(2), None]) };
let ps2 = PathSet { next: None, steps: VecDeque::from([Some(3), Some(1)]) };
let sum = ps1.clone().overlay(ps2.clone());
assert_eq!(format!("{sum}"), "(2>f>x|3>1>x)");
}
fn extend_scaffold() -> PathSet {
PathSet::branch([None, Some(1), None], [
(None, PathSet::end([None, Some(1)])),
(Some(1), PathSet::end([None, Some(2)])),
])
}
#[test]
fn test_extend_noclone() {
let mut ps = extend_scaffold();
ps.prepend(Some(0));
assert_eq!(format!("{ps}"), "0>f>1>f>(f>f>1|1>f>2)");
}
}


@@ -1,45 +1,101 @@
use std::collections::VecDeque;
use hashbrown::HashMap;
use super::apply::apply; use super::apply::apply;
use super::context::{Context, Return}; use super::context::{Halt, RunContext};
use super::error::RuntimeError; use super::error::RunError;
use crate::foreign::AtomicReturn; use super::nort::{Clause, Expr};
use crate::representations::interpreted::{Clause, ExprInst}; use crate::foreign::atom::AtomicReturn;
use crate::foreign::error::ExternResult;
use crate::location::CodeLocation;
use crate::name::Sym;
use crate::utils::pure_seq::pushed;
/// Information about a normalization run presented to an atom
#[derive(Clone)]
pub struct RunData<'a> {
/// Location of the atom
pub location: CodeLocation,
/// Information about the execution
pub ctx: RunContext<'a>,
}
#[derive(Debug)]
pub struct Interrupted {
stack: Vec<Expr>,
}
impl Interrupted {
pub fn resume(self, ctx: RunContext) -> Result<Halt, RunError> {
run_stack(self.stack, ctx)
}
}
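// A sketch of how the interrupt/resume pair above enables time-slicing: run
// with a bounded gas budget and, whenever the run is preempted, resume the
// saved stack under the same context. Only items of this module are used; the
// function itself is illustrative and not called anywhere.
#[allow(dead_code)]
fn run_in_slices(start: Expr, ctx: RunContext) -> Result<Halt, RunError> {
  let mut attempt = run(start, ctx.clone());
  loop {
    match attempt {
      // Preempted: pick the reduction back up where it stopped.
      Err(RunError::Interrupted(interrupted)) =>
        attempt = interrupted.resume(ctx.clone()),
      // Finished or failed: pass the result on.
      done => return done,
    }
  }
}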
/// Normalize an expression using beta reduction with memoization /// Normalize an expression using beta reduction with memoization
pub fn run(expr: ExprInst, mut ctx: Context) -> Result<Return, RuntimeError> { pub fn run(mut expr: Expr, mut ctx: RunContext) -> Result<Halt, RunError> {
let (state, (gas, inert)) = expr.try_normalize( run_stack(vec![expr], ctx)
|mut cls, loc| -> Result<(Clause, _), RuntimeError> { }
while ctx.gas.map(|g| g > 0).unwrap_or(true) {
fn run_stack(
mut stack: Vec<Expr>,
mut ctx: RunContext,
) -> Result<Halt, RunError> {
let mut expr = stack.pop().expect("Empty stack");
loop {
if ctx.no_gas() {
return Err(RunError::Interrupted(Interrupted {
stack: pushed(stack, expr),
}));
}
let (next_clsi, inert) = expr.clause.try_normalize(|mut cls| {
loop {
if ctx.no_gas() {
return Ok((cls, false));
}
match cls { match cls {
cls @ Clause::Identity(_) => return Ok((cls, false)),
// TODO:
// - unfuck nested loop
// - inline most of [apply] to eliminate recursion step
Clause::Apply { f, x } => { Clause::Apply { f, x } => {
let res = apply(f, x, ctx.clone())?; if x.is_empty() {
if res.inert { return Ok((f.clause.into_cls(), false));
return Ok((res.state.expr_val().clause, (res.gas, true)));
} }
ctx.gas = res.gas; let (gas, clause) = apply(f, x, ctx.clone())?;
cls = res.state.expr().clause.clone(); if ctx.gas.is_some() {
ctx.gas = gas;
}
cls = clause;
}, },
Clause::Atom(data) => { Clause::Atom(data) => {
let AtomicReturn { clause, gas, inert } = data.run(ctx.clone())?; let run = RunData { ctx: ctx.clone(), location: expr.location() };
if inert { let atomic_ret = data.run(run)?;
return Ok((clause, (gas, true))); if ctx.gas.is_some() {
ctx.gas = atomic_ret.gas;
} }
ctx.gas = gas; if atomic_ret.inert {
cls = clause; return Ok((atomic_ret.clause, true));
}
cls = atomic_ret.clause;
}, },
Clause::Constant(c) => { Clause::Constant(c) => {
let symval = (ctx.symbols.get(&c)).ok_or_else(|| { let symval = (ctx.symbols.get(&c)).ok_or_else(|| {
RuntimeError::MissingSymbol(c.clone(), loc.clone()) RunError::MissingSymbol(c.clone(), expr.location())
})?; })?;
ctx.gas = ctx.gas.map(|g| g - 1); // cost of lookup ctx.gas = ctx.gas.map(|g| g - 1); // cost of lookup
cls = symval.expr().clause.clone(); cls = Clause::Identity(symval.clause.clone());
}, },
// non-reducible // non-reducible
_ => return Ok((cls, (ctx.gas, true))), c => return Ok((c, true)),
} };
} }
// out of gas })?;
Ok((cls, (ctx.gas, false))) expr.clause = next_clsi;
}, if inert {
)?; match stack.pop() {
Ok(Return { state, gas, inert }) Some(e) => expr = e,
None => return Ok(Halt { state: expr, gas: ctx.gas, inert }),
}
}
}
} }


@@ -10,27 +10,15 @@
pub mod error; pub mod error;
pub mod facade; pub mod facade;
pub mod foreign; pub mod foreign;
pub mod interner; pub mod gen;
pub mod intermediate;
pub mod interpreter; pub mod interpreter;
pub mod libs;
pub mod location;
pub mod name;
pub mod parse; pub mod parse;
pub mod pipeline; pub mod pipeline;
mod representations;
pub mod rule; pub mod rule;
pub mod systems; pub mod tree;
mod utils; pub mod utils;
pub mod virt_fs;
pub use interner::{Interner, Tok};
pub use pipeline::file_loader::{mk_dir_cache, mk_embed_cache};
pub use pipeline::parse_layer;
/// Element of VName and a common occurrence in the API
pub type Stok = Tok<String>;
pub use representations::ast_to_interpreted::ast_to_interpreted;
pub use representations::project::{
collect_consts, collect_rules, vname_to_sym_tree, ProjectTree,
};
pub use representations::{
ast, from_const_tree, interpreted, sourcefile, tree, ConstTree, Location,
NameLike, OrcString, PathSet, Sym, VName,
};
pub use utils::substack::Substack;
pub use utils::{ddispatch, take_with_output, thread_pool, IdMap, Side};


@@ -3,15 +3,10 @@ use std::sync::{Arc, Mutex};
pub struct DeleteCell<T>(pub Arc<Mutex<Option<T>>>); pub struct DeleteCell<T>(pub Arc<Mutex<Option<T>>>);
impl<T> DeleteCell<T> { impl<T> DeleteCell<T> {
pub fn new(t: T) -> Self { Self(Arc::new(Mutex::new(Some(t)))) } pub fn new(t: T) -> Self { Self(Arc::new(Mutex::new(Some(t)))) }
pub fn take(&self) -> Option<T> { self.0.lock().unwrap().take() } pub fn take(&self) -> Option<T> { self.0.lock().unwrap().take() }
}
pub fn clone_out(&self) -> Option<T> impl<T: Clone> DeleteCell<T> {
where pub fn clone_out(&self) -> Option<T> { self.0.lock().unwrap().clone() }
T: Clone,
{
self.0.lock().unwrap().clone()
}
} }
impl<T> Clone for DeleteCell<T> { impl<T> Clone for DeleteCell<T> {
fn clone(&self) -> Self { Self(self.0.clone()) } fn clone(&self) -> Self { Self(self.0.clone()) }


@@ -4,6 +4,6 @@
//! beyond being general Rust functions. //! beyond being general Rust functions.
//! It also exposes timers. //! It also exposes timers.
mod system; pub mod poller;
pub mod system;
pub use system::{AsynchSystem, InfiniteBlock, MessagePort}; mod delete_cell;


@@ -1,10 +1,12 @@
//! Abstract implementation of the poller
use std::collections::BinaryHeap; use std::collections::BinaryHeap;
use std::mem; use std::mem;
use std::sync::mpsc::{channel, Receiver, RecvError, RecvTimeoutError, Sender}; use std::sync::mpsc::{channel, Receiver, RecvError, RecvTimeoutError, Sender};
use std::thread::sleep; use std::thread::sleep;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use crate::utils::DeleteCell; use super::delete_cell::DeleteCell;
enum TimerKind<TOnce, TRec> { enum TimerKind<TOnce, TRec> {
Once(DeleteCell<TOnce>), Once(DeleteCell<TOnce>),
@@ -27,8 +29,8 @@ impl<TOnce, TRec> Clone for TimerKind<TOnce, TRec> {
/// [Ord] implementation of this struct is reversed; it can be intuitively /// [Ord] implementation of this struct is reversed; it can be intuitively
/// thought of as ordering by urgency. /// thought of as ordering by urgency.
struct Timer<TOnce, TRec> { struct Timer<TOnce, TRec> {
pub expires: Instant, expires: Instant,
pub kind: TimerKind<TOnce, TRec>, kind: TimerKind<TOnce, TRec>,
} }
impl<TOnce, TRec> Clone for Timer<TOnce, TRec> { impl<TOnce, TRec> Clone for Timer<TOnce, TRec> {
fn clone(&self) -> Self { fn clone(&self) -> Self {
@@ -50,42 +52,55 @@ impl<TOnce, TRec> Ord for Timer<TOnce, TRec> {
} }
} }
/// Representation of a scheduled timer
#[derive(Clone)]
pub struct TimerHandle<T>(DeleteCell<T>);
impl<T> TimerHandle<T> {
/// Cancel the timer
pub fn cancel(self) { mem::drop(self.0.take()) }
}
/// The abstract event poller implementation used by the standard asynch
/// subsystem.
pub struct Poller<TEv, TOnce, TRec: Clone> { pub struct Poller<TEv, TOnce, TRec: Clone> {
timers: BinaryHeap<Timer<TOnce, TRec>>, timers: BinaryHeap<Timer<TOnce, TRec>>,
receiver: Receiver<TEv>, receiver: Receiver<TEv>,
} }
impl<TEv, TOnce, TRec: Clone + Send> Poller<TEv, TOnce, TRec> { impl<TEv, TOnce, TRec: Clone> Poller<TEv, TOnce, TRec> {
/// Create an event poller and a [Sender] that can produce events on it.
pub fn new() -> (Sender<TEv>, Self) { pub fn new() -> (Sender<TEv>, Self) {
let (sender, receiver) = channel(); let (sender, receiver) = channel();
let this = Self { receiver, timers: BinaryHeap::new() }; let this = Self { receiver, timers: BinaryHeap::new() };
(sender, this) (sender, this)
} }
/// Set a single-fire timer
pub fn set_timeout( pub fn set_timeout(
&mut self, &mut self,
duration: Duration, duration: Duration,
data: TOnce, data: TOnce,
) -> impl Fn() + Clone { ) -> TimerHandle<TOnce> {
let data_cell = DeleteCell::new(data); let data_cell = DeleteCell::new(data);
self.timers.push(Timer { self.timers.push(Timer {
kind: TimerKind::Once(data_cell.clone()), kind: TimerKind::Once(data_cell.clone()),
expires: Instant::now() + duration, expires: Instant::now() + duration,
}); });
move || mem::drop(data_cell.take()) TimerHandle(data_cell)
} }
/// Set a recurring timer
pub fn set_interval( pub fn set_interval(
&mut self, &mut self,
period: Duration, period: Duration,
data: TRec, data: TRec,
) -> impl Fn() + Send + Clone { ) -> TimerHandle<TRec> {
let data_cell = DeleteCell::new(data); let data_cell = DeleteCell::new(data);
self.timers.push(Timer { self.timers.push(Timer {
expires: Instant::now() + period, expires: Instant::now() + period,
kind: TimerKind::Recurring { period, data_cell: data_cell.clone() }, kind: TimerKind::Recurring { period, data_cell: data_cell.clone() },
}); });
move || mem::drop(data_cell.take()) TimerHandle(data_cell)
} }
/// Process a timer popped from the timers heap of this event loop. /// Process a timer popped from the timers heap of this event loop.
@@ -140,8 +155,12 @@ impl<TEv, TOnce, TRec: Clone + Send> Poller<TEv, TOnce, TRec> {
} }
} }
/// Events produced by [Poller].
pub enum PollEvent<TEv, TOnce, TRec> { pub enum PollEvent<TEv, TOnce, TRec> {
/// An event was sent to the [Sender] associated with the [Poller].
Event(TEv), Event(TEv),
/// A single-fire timer expired
Once(TOnce), Once(TOnce),
/// A recurring event fired
Recurring(TRec), Recurring(TRec),
} }
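// A short usage sketch for the timer half of the poller above. The payload
// types are illustrative; cancellation drops the payload through the shared
// DeleteCell, so a timer that has already been scheduled simply delivers
// nothing once cancelled.
#[allow(dead_code)]
fn timer_sketch() {
  use std::time::Duration;
  let (_sender, mut poller) = Poller::<(), &'static str, &'static str>::new();
  let once = poller.set_timeout(Duration::from_millis(10), "fire once");
  let _every = poller.set_interval(Duration::from_millis(50), "fire repeatedly");
  // Cancelling before expiry means the "fire once" payload is never delivered.
  once.cancel();
}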


@@ -1,3 +1,7 @@
//! Object to pass to [crate::facade::loader::Loader::add_system] to enable the
//! asynch subsystem. Also, many other systems depend on it; these take a mut ref to
//! register themselves.
use std::any::{type_name, Any, TypeId}; use std::any::{type_name, Any, TypeId};
use std::cell::RefCell; use std::cell::RefCell;
use std::collections::VecDeque; use std::collections::VecDeque;
@@ -11,17 +15,23 @@ use hashbrown::HashMap;
use ordered_float::NotNan; use ordered_float::NotNan;
use rust_embed::RustEmbed; use rust_embed::RustEmbed;
use crate::facade::{IntoSystem, System}; use super::poller::{PollEvent, Poller, TimerHandle};
use crate::foreign::cps_box::{init_cps, CPSBox}; use crate::facade::system::{IntoSystem, System};
use crate::foreign::{xfn_2ary, Atomic, ExternError, InertAtomic, XfnResult}; use crate::foreign::atom::Atomic;
use crate::interpreted::{Clause, ExprInst}; use crate::foreign::cps_box::CPSBox;
use crate::interpreter::HandlerTable; use crate::foreign::error::ExternError;
use crate::pipeline::file_loader::embed_to_map; use crate::foreign::fn_bridge::constructors::xfn_2ary;
use crate::systems::codegen::call; use crate::foreign::inert::{Inert, InertPayload};
use crate::systems::stl::Numeric; use crate::gen::tpl;
use crate::utils::poller::{PollEvent, Poller}; use crate::gen::traits::Gen;
use crate::utils::unwrap_or; use crate::gen::tree::{atom_leaf, ConstTree};
use crate::{ConstTree, Interner}; use crate::interpreter::gen_nort::nort_gen;
use crate::interpreter::handler::HandlerTable;
use crate::interpreter::nort::Expr;
use crate::libs::std::number::Numeric;
use crate::location::{CodeGenInfo, CodeLocation};
use crate::utils::unwrap_or::unwrap_or;
use crate::virt_fs::{DeclTree, EmbeddedFS, PrefixFS, VirtFS};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct Timer { struct Timer {
@@ -29,28 +39,28 @@ struct Timer {
delay: NotNan<f64>, delay: NotNan<f64>,
} }
pub fn set_timer(recurring: bool, delay: Numeric) -> XfnResult<Clause> { fn set_timer(rec: Inert<bool>, delay: Numeric) -> CPSBox<Timer> {
Ok(init_cps(2, Timer { recurring, delay: delay.as_float() })) CPSBox::new(2, Timer { recurring: rec.0, delay: delay.as_float() })
} }
#[derive(Clone)] #[derive(Clone)]
struct CancelTimer(Arc<Mutex<dyn Fn() + Send>>); struct CancelTimer(Arc<Mutex<dyn Fn() + Send>>);
impl CancelTimer { impl CancelTimer {
pub fn new(f: impl Fn() + Send + 'static) -> Self { pub fn new<T: Send + Clone + 'static>(canceller: TimerHandle<T>) -> Self {
Self(Arc::new(Mutex::new(f))) Self(Arc::new(Mutex::new(move || canceller.clone().cancel())))
} }
pub fn cancel(&self) { self.0.lock().unwrap()() } pub fn cancel(&self) { self.0.lock().unwrap()() }
} }
impl Debug for CancelTimer { impl Debug for CancelTimer {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "opaque cancel operation") f.debug_struct("CancelTimer").finish_non_exhaustive()
} }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct Yield; struct Yield;
impl InertAtomic for Yield { impl InertPayload for Yield {
fn type_str() -> &'static str { "a yield command" } const TYPE_STR: &'static str = "asynch::yield";
} }
/// Error indicating a yield command when all event producers and timers had /// Error indicating a yield command when all event producers and timers had
@@ -76,17 +86,24 @@ impl MessagePort {
} }
} }
fn gen() -> CodeGenInfo { CodeGenInfo::no_details("asynch") }
#[derive(RustEmbed)] #[derive(RustEmbed)]
#[folder = "src/systems/asynch"] #[folder = "src/libs/asynch"]
#[prefix = "system/"]
#[include = "*.orc"] #[include = "*.orc"]
struct AsynchEmbed; struct AsynchEmbed;
type AnyHandler<'a> = Box<dyn FnMut(Box<dyn Any>) -> Vec<ExprInst> + 'a>; fn code() -> DeclTree {
DeclTree::ns("system::async", [DeclTree::leaf(
PrefixFS::new(EmbeddedFS::new::<AsynchEmbed>(".orc", gen()), "", "io").rc(),
)])
}
type AnyHandler<'a> = Box<dyn FnMut(Box<dyn Any>) -> Vec<Expr> + 'a>;
/// Datastructures the asynch system will eventually be constructed from. /// Datastructures the asynch system will eventually be constructed from.
pub struct AsynchSystem<'a> { pub struct AsynchSystem<'a> {
poller: Poller<Box<dyn Any + Send>, ExprInst, ExprInst>, poller: Poller<Box<dyn Any + Send>, Expr, Expr>,
sender: Sender<Box<dyn Any + Send>>, sender: Sender<Box<dyn Any + Send>>,
handlers: HashMap<TypeId, AnyHandler<'a>>, handlers: HashMap<TypeId, AnyHandler<'a>>,
} }
@@ -109,7 +126,7 @@ impl<'a> AsynchSystem<'a> {
/// if the given type is already handled. /// if the given type is already handled.
pub fn register<T: 'static>( pub fn register<T: 'static>(
&mut self, &mut self,
mut f: impl FnMut(Box<T>) -> Vec<ExprInst> + 'a, mut f: impl FnMut(Box<T>) -> Vec<Expr> + 'a,
) { ) {
let cb = move |a: Box<dyn Any>| f(a.downcast().expect("keyed by TypeId")); let cb = move |a: Box<dyn Any>| f(a.downcast().expect("keyed by TypeId"));
let prev = self.handlers.insert(TypeId::of::<T>(), Box::new(cb)); let prev = self.handlers.insert(TypeId::of::<T>(), Box::new(cb));
@@ -132,39 +149,40 @@ impl<'a> Default for AsynchSystem<'a> {
} }
impl<'a> IntoSystem<'a> for AsynchSystem<'a> { impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
fn into_system(self, i: &Interner) -> System<'a> { fn into_system(self) -> System<'a> {
let Self { mut handlers, poller, .. } = self; let Self { mut handlers, poller, .. } = self;
let mut handler_table = HandlerTable::new(); let mut handler_table = HandlerTable::new();
let polly = Rc::new(RefCell::new(poller)); let polly = Rc::new(RefCell::new(poller));
handler_table.register({ handler_table.register({
let polly = polly.clone(); let polly = polly.clone();
move |t: Box<CPSBox<Timer>>| { move |t: &CPSBox<Timer>| {
let mut polly = polly.borrow_mut(); let mut polly = polly.borrow_mut();
let (timeout, action, cont) = t.unpack2(); let (Timer { delay, recurring }, action, cont) = t.unpack2();
let duration = Duration::from_secs_f64(*timeout.delay); let duration = Duration::from_secs_f64(**delay);
let cancel_timer = match timeout.recurring { let cancel_timer = match *recurring {
true => CancelTimer::new(polly.set_interval(duration, action)), true => CancelTimer::new(polly.set_interval(duration, action)),
false => CancelTimer::new(polly.set_timeout(duration, action)), false => CancelTimer::new(polly.set_timeout(duration, action)),
}; };
Ok(call(cont, [init_cps(1, cancel_timer).wrap()]).wrap()) let tpl = tpl::A(tpl::Slot, tpl::V(CPSBox::new(1, cancel_timer)));
tpl.template(nort_gen(cont.location()), [cont])
} }
}); });
handler_table.register(move |t: Box<CPSBox<CancelTimer>>| { handler_table.register(move |t: &CPSBox<CancelTimer>| {
let (command, cont) = t.unpack1(); let (command, cont) = t.unpack1();
command.cancel(); command.cancel();
Ok(cont) cont
}); });
handler_table.register({ handler_table.register({
let polly = polly.clone(); let polly = polly.clone();
let mut microtasks = VecDeque::new(); let mut microtasks = VecDeque::new();
move |_: Box<Yield>| { move |_: &Inert<Yield>| {
if let Some(expr) = microtasks.pop_front() { if let Some(expr) = microtasks.pop_front() {
return Ok(expr); return Ok(expr);
} }
let mut polly = polly.borrow_mut(); let mut polly = polly.borrow_mut();
loop { loop {
let next = unwrap_or!(polly.run(); let next = unwrap_or!(polly.run();
return Err(InfiniteBlock.into_extern()) return Err(InfiniteBlock.rc())
); );
match next { match next {
PollEvent::Once(expr) => return Ok(expr), PollEvent::Once(expr) => return Ok(expr),
@@ -179,7 +197,9 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
if !events.is_empty() { if !events.is_empty() {
microtasks = VecDeque::from(events); microtasks = VecDeque::from(events);
// trampoline // trampoline
return Ok(Yield.atom_exi()); let loc =
CodeLocation::Gen(CodeGenInfo::no_details("system::asynch"));
return Ok(Inert(Yield).atom_expr(loc));
} }
}, },
} }
@@ -187,18 +207,14 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
} }
}); });
System { System {
name: vec!["system".to_string(), "asynch".to_string()], name: "system::asynch",
lexer_plugins: vec![], lexer_plugins: vec![],
line_parsers: vec![], line_parsers: vec![],
constants: ConstTree::namespace( constants: ConstTree::ns("system::async", [ConstTree::tree([
[i.i("system"), i.i("async")], ("set_timer", atom_leaf(xfn_2ary(set_timer))),
ConstTree::tree([ ("yield", atom_leaf(Inert(Yield))),
(i.i("set_timer"), ConstTree::xfn(xfn_2ary(set_timer))), ])]),
(i.i("yield"), ConstTree::atom(Yield)), code: code(),
]),
)
.unwrap_tree(),
code: embed_to_map::<AsynchEmbed>(".orc", i),
prelude: Vec::new(), prelude: Vec::new(),
handlers: handler_table, handlers: handler_table,
} }


@@ -0,0 +1,201 @@
use std::ffi::OsString;
use std::fs::File;
use std::path::{Path, PathBuf};
use super::osstring::os_string_lib;
use crate::facade::system::{IntoSystem, System};
use crate::foreign::atom::Atomic;
use crate::foreign::cps_box::CPSBox;
use crate::foreign::error::ExternResult;
use crate::foreign::fn_bridge::constructors::{xfn_1ary, xfn_2ary};
use crate::foreign::inert::{Inert, InertPayload};
use crate::foreign::process::Unstable;
use crate::foreign::to_clause::ToClause;
use crate::gen::tpl;
use crate::gen::traits::Gen;
use crate::gen::tree::{atom_ent, atom_leaf, ConstTree};
use crate::interpreter::gen_nort::nort_gen;
use crate::interpreter::handler::HandlerTable;
use crate::interpreter::nort::{Clause, Expr};
use crate::libs::io::instances::io_error_handler;
use crate::libs::io::{Sink, Source};
use crate::libs::scheduler::system::{SeqScheduler, SharedHandle};
use crate::libs::std::runtime_error::RuntimeError;
use crate::utils::combine::Combine;
use crate::virt_fs::DeclTree;
#[derive(Debug, Clone)]
struct ReadFileCmd(OsString);
impl InertPayload for ReadFileCmd {
const TYPE_STR: &'static str = "readfile command";
}
#[derive(Debug, Clone)]
struct ReadDirCmd(OsString);
impl InertPayload for ReadDirCmd {
const TYPE_STR: &'static str = "readdir command";
}
#[derive(Debug, Clone)]
struct WriteFile {
name: OsString,
append: bool,
}
impl InertPayload for WriteFile {
const TYPE_STR: &'static str = "writefile command";
}
#[must_use]
fn read_file(sched: &SeqScheduler, cmd: &CPSBox<ReadFileCmd>) -> Expr {
let (ReadFileCmd(name), succ, fail, cont) = cmd.unpack3();
let name = name.clone();
let cancel = sched.run_orphan(
move |_| File::open(name),
|file, _| match file {
Err(e) => vec![io_error_handler(e, fail)],
Ok(f) => {
let source_handle = SharedHandle::wrap(Source::new(Box::new(f)));
let tpl = tpl::A(tpl::Slot, tpl::V(Inert(source_handle)));
vec![tpl.template(nort_gen(succ.location()), [succ])]
},
},
);
let tpl = tpl::A(tpl::Slot, tpl::V(CPSBox::new(1, cancel)));
tpl.template(nort_gen(cont.location()), [cont])
}
#[must_use]
fn read_dir(sched: &SeqScheduler, cmd: &CPSBox<ReadDirCmd>) -> Expr {
let (ReadDirCmd(name), succ, fail, cont) = cmd.unpack3();
let name = name.clone();
let cancel = sched.run_orphan(
move |_| {
Path::new(&name)
.read_dir()?
.map(|r| r.and_then(|e| Ok((e.file_name(), e.file_type()?.is_dir()))))
.collect()
},
|items: std::io::Result<Vec<(OsString, bool)>>, _| match items {
Err(e) => vec![io_error_handler(e, fail)],
Ok(os_namev) => {
let converted = (os_namev.into_iter())
.map(|(n, d)| {
Ok((
Inert(n).atom_expr(succ.location()),
Inert(d).atom_expr(succ.location()),
))
})
.collect::<Result<Vec<_>, Clause>>();
match converted {
Err(e) => {
let e = e.to_expr(fail.location());
let tpl = tpl::A(tpl::Slot, tpl::Slot);
vec![tpl.template(nort_gen(fail.location()), [fail, e])]
},
Ok(names) => {
let names = names.to_expr(succ.location());
let tpl = tpl::A(tpl::Slot, tpl::Slot);
vec![tpl.template(nort_gen(succ.location()), [succ, names])]
},
}
},
},
);
let tpl = tpl::A(tpl::Slot, tpl::V(CPSBox::new(1, cancel)));
tpl.template(nort_gen(cont.location()), [cont])
}
#[must_use]
fn write_file(sched: &SeqScheduler, cmd: &CPSBox<WriteFile>) -> Expr {
let (cmd, succ, fail, cont) = cmd.unpack3();
let cmd = cmd.clone();
let cancel = sched.run_orphan(
move |_| File::options().write(true).append(cmd.append).open(&cmd.name),
|file, _| match file {
Err(e) => vec![io_error_handler(e, fail)],
Ok(f) => {
let sink_handle = SharedHandle::wrap(Box::new(f) as Sink);
let tpl = tpl::A(tpl::Slot, tpl::V(Inert(sink_handle)));
vec![tpl.template(nort_gen(succ.location()), [succ])]
},
},
);
let tpl = tpl::A(tpl::Slot, tpl::V(CPSBox::new(1, cancel)));
tpl.template(nort_gen(cont.location()), [cont])
}
fn open_file_read_cmd(name: OsString) -> CPSBox<ReadFileCmd> {
CPSBox::new(3, ReadFileCmd(name))
}
fn read_dir_cmd(name: OsString) -> CPSBox<ReadDirCmd> {
CPSBox::new(3, ReadDirCmd(name))
}
fn open_file_write_cmd(name: OsString) -> CPSBox<WriteFile> {
CPSBox::new(3, WriteFile { name, append: false })
}
fn open_file_append_cmd(name: OsString) -> CPSBox<WriteFile> {
CPSBox::new(3, WriteFile { name, append: true })
}
fn join_paths(root: OsString, sub: OsString) -> OsString {
let mut path = PathBuf::from(root);
path.push(sub);
path.into_os_string()
}
fn pop_path(
path: Inert<OsString>,
) -> Option<(Inert<OsString>, Inert<OsString>)> {
let mut path = PathBuf::from(path.0);
let sub = path.file_name()?.to_owned();
// pop() must run in release builds too, so keep the side effect out of the assert
let popped = path.pop();
debug_assert!(popped, "file_name above returned Some");
Some((Inert(path.into_os_string()), Inert(sub)))
}
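// A standalone check of the path helpers above: `join_paths` appends a segment
// with `PathBuf` semantics and `pop_path` splits the final component back off,
// so the two round-trip for ordinary relative segments. Purely illustrative.
#[allow(dead_code)]
fn path_roundtrip_sketch() {
  let joined = join_paths(OsString::from("logs/today"), OsString::from("run.txt"));
  let (parent, leaf) =
    pop_path(Inert(joined)).expect("the joined path ends in a file name");
  assert_eq!(parent.0, OsString::from("logs/today"));
  assert_eq!(leaf.0, OsString::from("run.txt"));
}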
/// A rudimentary system to read and write files.
#[derive(Clone)]
pub struct DirectFS {
scheduler: SeqScheduler,
}
impl DirectFS {
/// Create a new instance of the system.
pub fn new(scheduler: SeqScheduler) -> Self { Self { scheduler } }
}
impl IntoSystem<'static> for DirectFS {
fn into_system(self) -> System<'static> {
let mut handlers = HandlerTable::new();
let sched = self.scheduler.clone();
handlers.register(move |cmd| read_file(&sched, cmd));
let sched = self.scheduler.clone();
handlers.register(move |cmd| read_dir(&sched, cmd));
let sched = self.scheduler;
handlers.register(move |cmd| write_file(&sched, cmd));
System {
name: "system::directfs",
code: DeclTree::empty(),
prelude: Vec::new(),
lexer_plugins: vec![],
line_parsers: vec![],
constants: ConstTree::ns("system::fs", [ConstTree::tree([
("read_file", atom_leaf(xfn_1ary(open_file_read_cmd))),
("read_dir", atom_leaf(xfn_1ary(read_dir_cmd))),
("write_file", atom_leaf(xfn_1ary(open_file_write_cmd))),
("append_file", atom_leaf(xfn_1ary(open_file_append_cmd))),
("join_paths", atom_leaf(xfn_2ary(join_paths))),
("pop_path", atom_leaf(xfn_1ary(pop_path))),
atom_ent("cwd", [Unstable::new(|_| -> ExternResult<_> {
let path = std::env::current_dir()
.map_err(|e| RuntimeError::ext(e.to_string(), "reading CWD"))?;
Ok(Inert(path.into_os_string()))
})]),
])])
.combine(os_string_lib())
.expect("os_string library and directfs conflict"),
handlers,
}
}
}


@@ -1,5 +1,8 @@
//! A rudimentary system exposing methods for Orchid to interact with the file //! A rudimentary system exposing methods for Orchid to interact with the file
//! system. All paths are strings. //! system. All paths are strings.
//!
//! The system depends on [crate::libs::scheduler] for scheduling blocking I/O
//! on a separate thread.
mod commands; mod commands;
mod osstring; mod osstring;


@@ -0,0 +1,44 @@
use std::ffi::OsString;
use crate::foreign::atom::Atomic;
use crate::foreign::error::ExternResult;
use crate::foreign::fn_bridge::constructors::xfn_1ary;
use crate::foreign::inert::{Inert, InertPayload};
use crate::foreign::to_clause::ToClause;
use crate::foreign::try_from_expr::TryFromExpr;
use crate::gen::tree::{atom_leaf, ConstTree};
use crate::interpreter::nort::{Clause, Expr};
use crate::libs::std::string::OrcString;
use crate::location::CodeLocation;
impl InertPayload for OsString {
const TYPE_STR: &'static str = "OsString";
}
impl TryFromExpr for OsString {
fn from_expr(exi: Expr) -> ExternResult<Self> { Ok(Inert::from_expr(exi)?.0) }
}
impl ToClause for OsString {
fn to_clause(self, _: CodeLocation) -> Clause { Inert(self).atom_cls() }
}
pub fn os_to_string(
os: Inert<OsString>,
) -> Result<Inert<OrcString>, Inert<OsString>> {
os.0.into_string().map(|s| Inert(s.into())).map_err(Inert)
}
pub fn string_to_os(str: Inert<OrcString>) -> Inert<OsString> {
Inert(str.0.get_string().into())
}
pub fn os_print(os: Inert<OsString>) -> Inert<OrcString> {
Inert(os.0.to_string_lossy().to_string().into())
}
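// A standalone illustration of the two conversion paths wrapped above:
// `into_string` is fallible and returns the original `OsString` on failure,
// while `to_string_lossy` always succeeds by replacing undecodable bytes.
#[allow(dead_code)]
fn os_conversion_sketch() {
  let os = OsString::from("lossless");
  match os.clone().into_string() {
    Ok(s) => assert_eq!(s, "lossless"), // valid UTF-8 converts exactly
    Err(original) => assert_eq!(original, os), // otherwise the input is handed back
  }
  assert_eq!(os.to_string_lossy(), "lossless"); // the lossy path never fails
}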
pub fn os_string_lib() -> ConstTree {
ConstTree::tree([
("os_to_string", atom_leaf(xfn_1ary(os_to_string))),
("string_to_os", atom_leaf(xfn_1ary(string_to_os))),
("os_print", atom_leaf(xfn_1ary(os_print))),
])
}

src/libs/io/bindings.rs (new file, 80 lines)

@@ -0,0 +1,80 @@
use super::flow::IOCmdHandlePack;
use super::instances::{BRead, ReadCmd, SRead, WriteCmd};
use super::service::{Sink, Source};
use crate::foreign::cps_box::CPSBox;
use crate::foreign::error::ExternResult;
use crate::foreign::fn_bridge::constructors::{xfn_1ary, xfn_2ary};
use crate::foreign::inert::Inert;
use crate::gen::tree::{atom_leaf, ConstTree};
use crate::libs::scheduler::system::SharedHandle;
use crate::libs::std::binary::Binary;
use crate::libs::std::runtime_error::RuntimeError;
use crate::libs::std::string::OrcString;
use crate::utils::combine::Combine;
pub type WriteHandle = Inert<SharedHandle<Sink>>;
pub type ReadHandle = Inert<SharedHandle<Source>>;
type ReadCmdPack = CPSBox<IOCmdHandlePack<ReadCmd>>;
type WriteCmdPack = CPSBox<IOCmdHandlePack<WriteCmd>>;
pub fn read_string(Inert(handle): ReadHandle) -> ReadCmdPack {
let cmd = ReadCmd::RStr(SRead::All);
CPSBox::new(3, IOCmdHandlePack { handle, cmd })
}
pub fn read_line(Inert(handle): ReadHandle) -> ReadCmdPack {
let cmd = ReadCmd::RStr(SRead::Line);
CPSBox::new(3, IOCmdHandlePack { handle, cmd })
}
pub fn read_bin(Inert(handle): ReadHandle) -> ReadCmdPack {
let cmd = ReadCmd::RBytes(BRead::All);
CPSBox::new(3, IOCmdHandlePack { handle, cmd })
}
pub fn read_bytes(Inert(handle): ReadHandle, n: Inert<usize>) -> ReadCmdPack {
let cmd = ReadCmd::RBytes(BRead::N(n.0));
CPSBox::new(3, IOCmdHandlePack { cmd, handle })
}
pub fn read_until(
Inert(handle): ReadHandle,
Inert(pattern): Inert<usize>,
) -> ExternResult<ReadCmdPack> {
let pattern = pattern.try_into().map_err(|_| {
let msg = format!("{pattern} doesn't fit into a byte");
RuntimeError::ext(msg, "converting number to byte")
})?;
let cmd = ReadCmd::RBytes(BRead::Until(pattern));
Ok(CPSBox::new(3, IOCmdHandlePack { handle, cmd }))
}
pub fn write_str(
Inert(handle): WriteHandle,
string: Inert<OrcString>,
) -> WriteCmdPack {
let cmd = WriteCmd::WStr(string.0.get_string());
CPSBox::new(3, IOCmdHandlePack { handle, cmd })
}
pub fn write_bin(
Inert(handle): WriteHandle,
bytes: Inert<Binary>,
) -> WriteCmdPack {
CPSBox::new(3, IOCmdHandlePack { handle, cmd: WriteCmd::WBytes(bytes.0) })
}
pub fn flush(Inert(handle): WriteHandle) -> WriteCmdPack {
CPSBox::new(3, IOCmdHandlePack { handle, cmd: WriteCmd::Flush })
}
pub fn io_bindings<'a>(
std_streams: impl IntoIterator<Item = (&'a str, ConstTree)>,
) -> ConstTree {
ConstTree::ns("system::io", [ConstTree::tree([
("read_string", atom_leaf(xfn_1ary(read_string))),
("read_line", atom_leaf(xfn_1ary(read_line))),
("read_bin", atom_leaf(xfn_1ary(read_bin))),
("read_n_bytes", atom_leaf(xfn_2ary(read_bytes))),
("read_until", atom_leaf(xfn_2ary(read_until))),
("write_str", atom_leaf(xfn_2ary(write_str))),
("write_bin", atom_leaf(xfn_2ary(write_bin))),
("flush", atom_leaf(xfn_1ary(flush))),
])
.combine(ConstTree::tree(std_streams))
.expect("std_stream name clashing with io functions")])
}


@@ -1,7 +1,7 @@
 use std::fmt::Display;
-use crate::foreign::ExternError;
-use crate::systems::scheduler::Canceller;
+use crate::foreign::error::ExternError;
+use crate::libs::scheduler::cancel_flag::CancelFlag;
 pub trait IOHandler<T> {
 type Product;
@@ -25,7 +25,7 @@ pub trait IOCmd: Send {
 fn execute(
 self,
 stream: &mut Self::Stream,
-cancel: Canceller,
+cancel: CancelFlag,
 ) -> Self::Result;
 }


@@ -1,36 +1,36 @@
-use std::io::{self, BufRead, BufReader, Read, Write};
+use std::io::{self, BufRead, Read, Write};
 use std::sync::Arc;
 use super::flow::IOCmd;
-use crate::foreign::Atomic;
-use crate::interpreted::ExprInst;
-use crate::systems::codegen::call;
-use crate::systems::scheduler::{Canceller, SharedHandle};
-use crate::systems::stl::Binary;
-use crate::OrcString;
-/// Any type that we can read controlled amounts of data from
-pub type Source = BufReader<Box<dyn Read + Send>>;
-/// Any type that we can write data to
-pub type Sink = Box<dyn Write + Send>;
+use super::service::{Sink, Source};
+use crate::foreign::inert::Inert;
+use crate::gen::tpl;
+use crate::gen::traits::Gen;
+use crate::interpreter::gen_nort::nort_gen;
+use crate::interpreter::nort::Expr;
+use crate::libs::scheduler::cancel_flag::CancelFlag;
+use crate::libs::scheduler::system::SharedHandle;
+use crate::libs::std::binary::Binary;
+use crate::libs::std::string::OrcString;
+use crate::location::{CodeGenInfo, CodeLocation};
 /// String reading command
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum SRead {
+pub(super) enum SRead {
 All,
 Line,
 }
 /// Binary reading command
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum BRead {
+pub(super) enum BRead {
 All,
 N(usize),
 Until(u8),
 }
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum ReadCmd {
+pub(super) enum ReadCmd {
 RBytes(BRead),
 RStr(SRead),
 }
@@ -45,7 +45,7 @@ impl IOCmd for ReadCmd {
 fn execute(
 self,
 stream: &mut Self::Stream,
-_cancel: Canceller,
+_cancel: CancelFlag,
 ) -> Self::Result {
 match self {
 Self::RBytes(bread) => {
@@ -77,33 +77,34 @@ impl IOCmd for ReadCmd {
 }
 /// Reading command (string or binary)
-pub enum ReadResult {
+pub(super) enum ReadResult {
 RStr(SRead, io::Result<String>),
 RBin(BRead, io::Result<Vec<u8>>),
 }
 impl ReadResult {
-pub fn dispatch(self, succ: ExprInst, fail: ExprInst) -> Vec<ExprInst> {
-match self {
-ReadResult::RBin(_, Err(e)) | ReadResult::RStr(_, Err(e)) => {
-vec![call(fail, [wrap_io_error(e)]).wrap()]
-},
-ReadResult::RBin(_, Ok(bytes)) => {
-let arg = Binary(Arc::new(bytes)).atom_cls().wrap();
-vec![call(succ, [arg]).wrap()]
-},
-ReadResult::RStr(_, Ok(text)) => {
-vec![call(succ, [OrcString::from(text).atom_exi()]).wrap()]
-},
-}
+pub fn dispatch(self, succ: Expr, fail: Expr) -> Vec<Expr> {
+vec![match self {
+ReadResult::RBin(_, Err(e)) | ReadResult::RStr(_, Err(e)) =>
+io_error_handler(e, fail),
+ReadResult::RBin(_, Ok(bytes)) =>
+tpl::A(tpl::Slot, tpl::V(Inert(Binary(Arc::new(bytes)))))
+.template(nort_gen(succ.location()), [succ]),
+ReadResult::RStr(_, Ok(text)) =>
+tpl::A(tpl::Slot, tpl::V(Inert(OrcString::from(text))))
+.template(nort_gen(succ.location()), [succ]),
+}]
 }
 }
 /// Function to convert [io::Error] to Orchid data
-pub fn wrap_io_error(_e: io::Error) -> ExprInst { 0usize.atom_exi() }
+pub(crate) fn io_error_handler(_e: io::Error, handler: Expr) -> Expr {
+let ctx = nort_gen(CodeLocation::Gen(CodeGenInfo::no_details("io_error")));
+tpl::A(tpl::Slot, tpl::V(Inert(0usize))).template(ctx, [handler])
+}
 /// Writing command (string or binary)
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum WriteCmd {
+pub(super) enum WriteCmd {
 WBytes(Binary),
 WStr(String),
 Flush,
@@ -117,7 +118,7 @@ impl IOCmd for WriteCmd {
 fn execute(
 self,
 stream: &mut Self::Stream,
-_cancel: Canceller,
+_cancel: CancelFlag,
 ) -> Self::Result {
 let result = match &self {
 Self::Flush => stream.flush(),
@@ -128,15 +129,13 @@ impl IOCmd for WriteCmd {
 }
 }
-pub struct WriteResult {
+pub(super) struct WriteResult {
+#[allow(unused)]
 pub cmd: WriteCmd,
 pub result: io::Result<()>,
 }
 impl WriteResult {
-pub fn dispatch(self, succ: ExprInst, fail: ExprInst) -> Vec<ExprInst> {
-match self.result {
-Ok(_) => vec![succ],
-Err(e) => vec![call(fail, vec![wrap_io_error(e)]).wrap()],
-}
+pub fn dispatch(self, succ: Expr, fail: Expr) -> Vec<Expr> {
+vec![self.result.map_or_else(|e| io_error_handler(e, fail), |()| succ)]
 }
 }

src/libs/io/mod.rs (new file, 38 lines)

@@ -0,0 +1,38 @@
//! System that allows Orchid to interact with trait objects of Rust's `Writer`
//! and with `BufReader`s of `Reader` trait objects.
//!
//! You can pass standard streams during initialization; the standard library
//! expects `stdin`, `stdout` and `stderr`. This system depends on
//! [crate::libs::scheduler] to run blocking I/O operations off-thread, which in
//! turn depends on [crate::libs::asynch] to process results on the main thread,
//! and [crate::libs::std] for `std::panic`.
//!
//! ```
//! use orchidlang::libs::asynch::system::AsynchSystem;
//! use orchidlang::libs::scheduler::system::SeqScheduler;
//! use orchidlang::libs::std::std_system::StdConfig;
//! use orchidlang::libs::io::{IOService, Stream};
//! use orchidlang::facade::loader::Loader;
//! use std::io::BufReader;
//!
//!
//! let mut asynch = AsynchSystem::new();
//! let scheduler = SeqScheduler::new(&mut asynch);
//! let std_streams = [
//! ("stdin", Stream::Source(BufReader::new(Box::new(std::io::stdin())))),
//! ("stdout", Stream::Sink(Box::new(std::io::stdout()))),
//! ("stderr", Stream::Sink(Box::new(std::io::stderr()))),
//! ];
//! let env = Loader::new()
//! .add_system(StdConfig { impure: false })
//! .add_system(asynch)
//! .add_system(scheduler.clone())
//! .add_system(IOService::new(scheduler.clone(), std_streams));
//! ```
mod bindings;
mod flow;
pub(super) mod instances;
mod service;
pub use service::{IOService, Sink, Source, Stream};

src/libs/io/service.rs (new file, 138 lines)

@@ -0,0 +1,138 @@
//! Object to pass to [crate::facade::loader::Loader::add_system] to enable the
//! I/O subsystem
use std::io::{BufReader, Read, Write};
use rust_embed::RustEmbed;
use trait_set::trait_set;
use super::bindings::io_bindings;
use super::flow::{IOCmd, IOCmdHandlePack};
use super::instances::{ReadCmd, WriteCmd};
use crate::facade::system::{IntoSystem, System};
use crate::foreign::cps_box::CPSBox;
use crate::foreign::inert::Inert;
use crate::gen::tpl;
use crate::gen::traits::Gen;
use crate::gen::tree::leaf;
use crate::interpreter::gen_nort::nort_gen;
use crate::interpreter::handler::HandlerTable;
use crate::libs::scheduler::system::{SeqScheduler, SharedHandle};
use crate::location::CodeGenInfo;
use crate::name::VName;
use crate::pipeline::load_solution::Prelude;
use crate::virt_fs::{DeclTree, EmbeddedFS, PrefixFS, VirtFS};
/// Any type that we can read controlled amounts of data from
pub type Source = BufReader<Box<dyn Read + Send>>;
/// Any type that we can write data to
pub type Sink = Box<dyn Write + Send>;
/// A shared type for sinks and sources
pub enum Stream {
/// A Source, aka. a BufReader
Source(Source),
/// A Sink, aka. a Writer
Sink(Sink),
}
trait_set! {
/// The table of default streams to be overlain on the I/O module, typically
/// stdin, stdout, stderr.
pub(super) trait StreamTable<'a> = IntoIterator<Item = (&'a str, Stream)>
}
fn gen() -> CodeGenInfo { CodeGenInfo::no_details("system::io") }
#[derive(RustEmbed)]
#[folder = "src/libs/io"]
#[include = "*.orc"]
struct IOEmbed;
fn code() -> DeclTree {
DeclTree::ns("system::io", [DeclTree::leaf(
PrefixFS::new(EmbeddedFS::new::<IOEmbed>(".orc", gen()), "", "io").rc(),
)])
}
/// A streaming I/O service for interacting with Rust's [std::io::Write] and
/// [std::io::Read] traits.
pub struct IOService<'a, ST: IntoIterator<Item = (&'a str, Stream)>> {
scheduler: SeqScheduler,
global_streams: ST,
}
impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IOService<'a, ST> {
/// Construct a new instance of the service
pub fn new(scheduler: SeqScheduler, global_streams: ST) -> Self {
Self { scheduler, global_streams }
}
}
impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
for IOService<'a, ST>
{
fn into_system(self) -> System<'static> {
let scheduler = self.scheduler.clone();
let mut handlers = HandlerTable::new();
handlers.register(move |cps: &CPSBox<IOCmdHandlePack<ReadCmd>>| {
let (IOCmdHandlePack { cmd, handle }, succ, fail, cont) = cps.unpack3();
let (cmd, fail1) = (*cmd, fail.clone());
let result = scheduler.schedule(
handle.clone(),
move |mut stream, cancel| {
let ret = cmd.execute(&mut stream, cancel);
(stream, ret)
},
move |stream, res, _cancel| (stream, res.dispatch(succ, fail1)),
|stream| (stream, Vec::new()),
);
match result {
Ok(cancel) => tpl::A(tpl::Slot, tpl::V(CPSBox::new(1, cancel)))
.template(nort_gen(cont.location()), [cont]),
Err(e) => tpl::A(tpl::Slot, tpl::V(Inert(e)))
.template(nort_gen(fail.location()), [fail]),
}
});
let scheduler = self.scheduler.clone();
handlers.register(move |cps: &CPSBox<IOCmdHandlePack<WriteCmd>>| {
let (IOCmdHandlePack { cmd, handle }, succ, fail, cont) = cps.unpack3();
let (succ1, fail1, cmd) = (succ, fail.clone(), cmd.clone());
let result = scheduler.schedule(
handle.clone(),
move |mut stream, cancel| {
let ret = cmd.execute(&mut stream, cancel);
(stream, ret)
},
move |stream, res, _cancel| (stream, res.dispatch(succ1, fail1)),
|stream| (stream, Vec::new()),
);
match result {
Ok(cancel) => tpl::A(tpl::Slot, tpl::V(CPSBox::new(1, cancel)))
.template(nort_gen(cont.location()), [cont]),
Err(e) => tpl::A(tpl::Slot, tpl::V(Inert(e)))
.template(nort_gen(fail.location()), [fail]),
}
});
let streams = self.global_streams.into_iter().map(|(n, stream)| {
let handle = match stream {
Stream::Sink(sink) => leaf(tpl::V(Inert(SharedHandle::wrap(sink)))),
Stream::Source(source) =>
leaf(tpl::V(Inert(SharedHandle::wrap(source)))),
};
(n, handle)
});
System {
handlers,
name: "system::io",
constants: io_bindings(streams),
code: code(),
prelude: vec![Prelude {
target: VName::literal("system::io::prelude"),
exclude: VName::literal("system::io"),
owner: gen(),
}],
lexer_plugins: vec![],
line_parsers: vec![],
}
}
}


@@ -1,9 +1,7 @@
 //! Constants exposed to usercode by the interpreter
 pub mod asynch;
-pub mod codegen;
 pub mod directfs;
 pub mod io;
-pub mod scheduler;
-pub mod stl;
 pub mod parse_custom_line;
+pub mod scheduler;
+pub mod std;


@@ -0,0 +1,47 @@
//! A helper for defining custom lines. See [custom_line]
use intern_all::Tok;
use crate::error::ProjectResult;
use crate::location::SourceRange;
use crate::parse::errors::ParseErrorKind;
use crate::parse::frag::Frag;
use crate::parse::lexer::Lexeme;
use crate::parse::parse_plugin::ParsePluginReq;
/// An exported line with a name for which the line parser denies exports
pub struct Unexportable(Lexeme);
impl ParseErrorKind for Unexportable {
const DESCRIPTION: &'static str = "this line type cannot be exported";
fn message(&self) -> String { format!("{} cannot be exported", &self.0) }
}
/// Parse a line identified by the specified leading keyword. Although not
/// required, plugins are encouraged to prefix their lines with a globally
/// unique keyword which makes or breaks their parsing, to avoid accidental
/// failure to recognize the line.
pub fn custom_line<'a>(
tail: Frag<'a>,
keyword: Tok<String>,
exportable: bool,
req: &dyn ParsePluginReq,
) -> Option<ProjectResult<(bool, Frag<'a>, SourceRange)>> {
let line_loc = req.frag_loc(tail);
let (fst, tail) = req.pop(tail).ok()?;
let fst_name = req.expect_name(fst).ok()?;
let (exported, n_ent, tail) = if fst_name == keyword {
(false, fst, tail.trim())
} else if fst_name.as_str() == "export" {
let (snd, tail) = req.pop(tail).ok()?;
req.expect(Lexeme::Name(keyword), snd).ok()?;
(true, snd, tail.trim())
} else {
return None;
};
Some(match exported && !exportable {
true => {
let err = Unexportable(n_ent.lexeme.clone());
Err(err.pack(req.range_loc(n_ent.range.clone())))
},
false => Ok((exported, tail, line_loc)),
})
}
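
For orientation, here is the same decision logic over plain string tokens, with none of the crate's parser plugin types; the token shapes and error text are invented for the sketch.

```
/// Simplified stand-in for `custom_line`: returns Some((exported, rest)) when the
/// line starts with `keyword` or `export keyword`, and None otherwise.
fn match_custom_line<'a>(
  tokens: &'a [&'a str],
  keyword: &str,
  exportable: bool,
) -> Option<Result<(bool, &'a [&'a str]), String>> {
  let (first, rest) = tokens.split_first()?;
  let (exported, rest) = if *first == keyword {
    (false, rest)
  } else if *first == "export" {
    let (second, rest) = rest.split_first()?;
    if *second != keyword { return None; }
    (true, rest)
  } else {
    return None;
  };
  Some(match exported && !exportable {
    true => Err(format!("{keyword} cannot be exported")),
    false => Ok((exported, rest)),
  })
}

fn main() {
  assert!(matches!(match_custom_line(&["const", "x"], "const", true), Some(Ok((false, _)))));
  assert!(matches!(match_custom_line(&["export", "const", "x"], "const", true), Some(Ok((true, _)))));
  assert!(match_custom_line(&["import", "x"], "const", true).is_none());
}
```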


@@ -1,31 +1,30 @@
 use std::any::Any;
 use std::collections::VecDeque;
-use super::Canceller;
-use crate::interpreted::ExprInst;
+use super::cancel_flag::CancelFlag;
+use crate::interpreter::nort::Expr;
 pub type SyncResult<T> = (T, Box<dyn Any + Send>);
 /// Output from handlers contains the resource being processed and any Orchid
 /// handlers executed as a result of the operation
-pub type HandlerRes<T> = (T, Vec<ExprInst>);
+pub type HandlerRes<T> = (T, Vec<Expr>);
 pub type SyncOperation<T> =
-Box<dyn FnOnce(T, Canceller) -> SyncResult<T> + Send>;
-pub type SyncOpResultHandler<T> = Box<
-dyn FnOnce(T, Box<dyn Any + Send>, Canceller) -> (T, Vec<ExprInst>) + Send,
->;
+Box<dyn FnOnce(T, CancelFlag) -> SyncResult<T> + Send>;
+pub type SyncOpResultHandler<T> =
+Box<dyn FnOnce(T, Box<dyn Any + Send>, CancelFlag) -> (T, Vec<Expr>) + Send>;
 struct SyncQueueItem<T> {
-cancelled: Canceller,
+cancelled: CancelFlag,
 operation: SyncOperation<T>,
 handler: SyncOpResultHandler<T>,
-early_cancel: Box<dyn FnOnce(T) -> (T, Vec<ExprInst>) + Send>,
+early_cancel: Box<dyn FnOnce(T) -> (T, Vec<Expr>) + Send>,
 }
 pub enum NextItemReportKind<T> {
 Free(T),
 Next {
 instance: T,
-cancelled: Canceller,
+cancelled: CancelFlag,
 operation: SyncOperation<T>,
 rest: BusyState<T>,
 },
@@ -34,17 +33,17 @@ pub enum NextItemReportKind<T> {
 pub struct NextItemReport<T> {
 pub kind: NextItemReportKind<T>,
-pub events: Vec<ExprInst>,
+pub events: Vec<Expr>,
 }
-pub struct BusyState<T> {
+pub(super) struct BusyState<T> {
 handler: SyncOpResultHandler<T>,
 queue: VecDeque<SyncQueueItem<T>>,
-seal: Option<Box<dyn FnOnce(T) -> Vec<ExprInst> + Send>>,
+seal: Option<Box<dyn FnOnce(T) -> Vec<Expr> + Send>>,
 }
 impl<T> BusyState<T> {
 pub fn new<U: 'static + Send>(
-handler: impl FnOnce(T, U, Canceller) -> HandlerRes<T> + Send + 'static,
+handler: impl FnOnce(T, U, CancelFlag) -> HandlerRes<T> + Send + 'static,
 ) -> Self {
 BusyState {
 handler: Box::new(|t, payload, cancel| {
@@ -62,14 +61,14 @@ impl<T> BusyState<T> {
 /// successfully enqueued and None if the queue is already sealed.
 pub fn enqueue<U: 'static + Send>(
 &mut self,
-operation: impl FnOnce(T, Canceller) -> (T, U) + Send + 'static,
-handler: impl FnOnce(T, U, Canceller) -> HandlerRes<T> + Send + 'static,
+operation: impl FnOnce(T, CancelFlag) -> (T, U) + Send + 'static,
+handler: impl FnOnce(T, U, CancelFlag) -> HandlerRes<T> + Send + 'static,
 early_cancel: impl FnOnce(T) -> HandlerRes<T> + Send + 'static,
-) -> Option<Canceller> {
+) -> Option<CancelFlag> {
 if self.seal.is_some() {
 return None;
 }
-let cancelled = Canceller::new();
+let cancelled = CancelFlag::new();
 self.queue.push_back(SyncQueueItem {
 cancelled: cancelled.clone(),
 early_cancel: Box::new(early_cancel),
@@ -87,7 +86,7 @@ impl<T> BusyState<T> {
 pub fn seal(
 &mut self,
-recipient: impl FnOnce(T) -> Vec<ExprInst> + Send + 'static,
+recipient: impl FnOnce(T) -> Vec<Expr> + Send + 'static,
 ) {
 assert!(self.seal.is_none(), "Already sealed");
 self.seal = Some(Box::new(recipient))
@@ -99,7 +98,7 @@ impl<T> BusyState<T> {
 mut self,
 instance: T,
 result: Box<dyn Any + Send>,
-cancelled: Canceller,
+cancelled: CancelFlag,
 ) -> NextItemReport<T> {
 let (mut instance, mut events) =
 (self.handler)(instance, result, cancelled);


@@ -1,13 +1,15 @@
+//! Flag for cancelling scheduled operations
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::Arc;
 /// A single-fire thread-safe boolean flag with relaxed ordering
 #[derive(Debug, Clone)]
-pub struct Canceller(Arc<AtomicBool>);
-impl Canceller {
+pub struct CancelFlag(Arc<AtomicBool>);
+impl CancelFlag {
 /// Create a new canceller
-pub fn new() -> Self { Canceller(Arc::new(AtomicBool::new(false))) }
+pub fn new() -> Self { CancelFlag(Arc::new(AtomicBool::new(false))) }
 /// Check whether the operation has been cancelled
 pub fn is_cancelled(&self) -> bool { self.0.load(Ordering::Relaxed) }
@@ -16,6 +18,6 @@ impl Canceller {
 pub fn cancel(&self) { self.0.store(true, Ordering::Relaxed) }
 }
-impl Default for Canceller {
+impl Default for CancelFlag {
 fn default() -> Self { Self::new() }
 }
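
The renamed `CancelFlag` is just a shared `AtomicBool`; a self-contained sketch of the same single-fire pattern with bare std types:

```
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;

fn main() {
  // Shared flag, cloned into the worker; Relaxed is enough for a fire-once signal.
  let flag = Arc::new(AtomicBool::new(false));
  let worker_flag = Arc::clone(&flag);
  let worker = thread::spawn(move || {
    while !worker_flag.load(Ordering::Relaxed) {
      thread::sleep(Duration::from_millis(1)); // stand-in for polling a long operation
    }
    "stopped early"
  });
  flag.store(true, Ordering::Relaxed); // the equivalent of CancelFlag::cancel
  println!("{}", worker.join().unwrap());
}
```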


@@ -16,9 +16,6 @@ impl<T> IdMap<T> {
 /// Create a new empty set
 pub fn new() -> Self { Self { next_id: 0, data: HashMap::new() } }
-/// Obtain a reference to the underlying map for iteration
-pub fn map(&self) -> &HashMap<u64, T> { &self.data }
 /// Insert an element with a new ID and return the ID
 pub fn insert(&mut self, t: T) -> u64 {
 let id = self.next_id;
@@ -28,14 +25,6 @@ impl<T> IdMap<T> {
 id
 }
-/// Obtain a reference to the element with the given ID
-pub fn get(&self, id: u64) -> Option<&T> { self.data.get(&id) }
-/// Obtain a mutable reference to the element with the given ID
-pub fn get_mut(&mut self, id: u64) -> Option<&mut T> {
-self.data.get_mut(&id)
-}
 /// Remove the element with the given ID from the set. The ID will not be
 /// reused.
 pub fn remove(&mut self, id: u64) -> Option<T> { self.data.remove(&id) }
@@ -56,6 +45,6 @@ mod test {
 let b = map.insert(2);
 assert_eq!(map.remove(a), Some(1));
 assert_eq!(map.remove(a), None);
-assert_eq!(map.get(b), Some(&2));
+assert_eq!(map.data.get(&b), Some(&2));
 }
 }
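
The surviving `IdMap` surface (insert, remove, direct access to `data`) boils down to a counter plus a `HashMap`; a minimal stand-in that mirrors the test above:

```
use std::collections::HashMap;

/// Minimal version of the IdMap idea: monotonically increasing u64 keys that
/// are never reused, even after removal.
struct TinyIdMap<T> {
  next_id: u64,
  data: HashMap<u64, T>,
}

impl<T> TinyIdMap<T> {
  fn new() -> Self { Self { next_id: 0, data: HashMap::new() } }
  fn insert(&mut self, t: T) -> u64 {
    let id = self.next_id;
    self.next_id += 1;
    self.data.insert(id, t);
    id
  }
  fn remove(&mut self, id: u64) -> Option<T> { self.data.remove(&id) }
}

fn main() {
  let mut map = TinyIdMap::new();
  let a = map.insert("first");
  let b = map.insert("second");
  assert_eq!(map.remove(a), Some("first"));
  assert_eq!(map.remove(a), None); // IDs are single-use
  assert_eq!(map.data.get(&b), Some(&"second"));
}
```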


@@ -0,0 +1,8 @@
//! A generic utility to sequence long blocking mutations that require a mutable
//! reference to a shared resource.
mod busy;
pub mod cancel_flag;
mod id_map;
pub mod system;
pub mod thread_pool;


@@ -1,28 +1,47 @@
+//! Object to pass to [crate::facade::loader::Loader::add_system] to enable the
+//! scheduling subsystem. Other systems also take clones as dependencies.
+//!
+//! ```
+//! use orchidlang::libs::asynch::system::AsynchSystem;
+//! use orchidlang::libs::scheduler::system::SeqScheduler;
+//! use orchidlang::libs::std::std_system::StdConfig;
+//! use orchidlang::facade::loader::Loader;
+//!
+//! let mut asynch = AsynchSystem::new();
+//! let scheduler = SeqScheduler::new(&mut asynch);
+//! let env = Loader::new()
+//! .add_system(StdConfig { impure: false })
+//! .add_system(asynch)
+//! .add_system(scheduler.clone());
+//! ```
 use std::any::{type_name, Any};
 use std::cell::RefCell;
 use std::fmt::Debug;
 use std::rc::Rc;
 use std::sync::{Arc, Mutex};
-use hashbrown::HashMap;
-use itertools::Itertools;
 use trait_set::trait_set;
-use super::busy::{BusyState, NextItemReportKind, SyncOperation};
-use super::{Canceller, HandlerRes};
-use crate::error::AssertionError;
-use crate::facade::{IntoSystem, System};
-use crate::foreign::cps_box::{init_cps, CPSBox};
-use crate::foreign::{xfn_1ary, InertAtomic, XfnResult};
-use crate::interpreted::{Clause, ExprInst};
-use crate::interpreter::HandlerTable;
-use crate::systems::asynch::{AsynchSystem, MessagePort};
+use super::busy::{BusyState, HandlerRes, NextItemReportKind, SyncOperation};
+use super::cancel_flag::CancelFlag;
+use super::id_map::IdMap;
+use super::thread_pool::ThreadPool;
+use crate::facade::system::{IntoSystem, System};
+use crate::foreign::cps_box::CPSBox;
+use crate::foreign::error::{AssertionError, ExternResult};
+use crate::foreign::fn_bridge::constructors::xfn_1ary;
+use crate::foreign::inert::{Inert, InertPayload};
+use crate::gen::tree::{atom_leaf, ConstTree};
+use crate::interpreter::handler::HandlerTable;
+use crate::interpreter::nort::Expr;
+use crate::libs::asynch::system::{AsynchSystem, MessagePort};
 use crate::utils::ddispatch::Request;
-use crate::utils::thread_pool::ThreadPool;
-use crate::utils::{take_with_output, unwrap_or, IdMap};
-use crate::{ConstTree, Location};
+use crate::utils::take_with_output::take_with_output;
+use crate::utils::unwrap_or::unwrap_or;
+use crate::virt_fs::DeclTree;
-enum SharedResource<T> {
+pub(super) enum SharedResource<T> {
 Free(T),
 Busy(BusyState<T>),
 Taken,
@@ -47,7 +66,7 @@ pub enum SharedState {
 /// A shared handle for a resource of type `T` that can be used with a
 /// [SeqScheduler] to execute mutating operations one by one in worker threads.
-pub struct SharedHandle<T>(Arc<Mutex<SharedResource<T>>>);
+pub struct SharedHandle<T>(pub(super) Arc<Mutex<SharedResource<T>>>);
 impl<T> SharedHandle<T> {
 /// Wrap a value to be accessible to a [SeqScheduler].
@@ -97,8 +116,8 @@ impl<T> Debug for SharedHandle<T> {
 .finish()
 }
 }
-impl<T: Send + 'static> InertAtomic for SharedHandle<T> {
-fn type_str() -> &'static str { "a SharedHandle" }
+impl<T: Send + 'static> InertPayload for SharedHandle<T> {
+const TYPE_STR: &'static str = "a SharedHandle";
 fn respond(&self, mut request: Request) {
 request.serve_with(|| {
 let this = self.clone();
@@ -110,7 +129,7 @@ impl<T: Send + 'static> InertAtomic for SharedHandle<T> {
 }
 #[derive(Clone)]
-pub struct TakeCmd(pub Arc<dyn Fn(SeqScheduler) + Send + Sync>);
+struct TakeCmd(pub Arc<dyn Fn(SeqScheduler) + Send + Sync>);
 impl Debug for TakeCmd {
 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 write!(f, "A command to drop a shared resource")
@@ -121,27 +140,25 @@ impl Debug for TakeCmd {
 /// which is either already sealed or taken.
 #[derive(Debug, Clone)]
 pub struct SealedOrTaken;
-impl InertAtomic for SealedOrTaken {
-fn type_str() -> &'static str {
-"a sealed-or-taken error for a shared resource"
-}
-}
-pub fn take_and_drop(x: ExprInst) -> XfnResult<Clause> {
-match x.request() {
-Some(t) => Ok(init_cps::<TakeCmd>(1, t)),
-None => AssertionError::fail(Location::Unknown, "SharedHandle"),
-}
-}
-pub fn is_taken_error(x: ExprInst) -> XfnResult<bool> {
-Ok(x.downcast::<SealedOrTaken>().is_ok())
+impl InertPayload for SealedOrTaken {
+const TYPE_STR: &'static str = "SealedOrTaken";
+}
+fn take_and_drop(x: Expr) -> ExternResult<CPSBox<TakeCmd>> {
+match x.clause.request() {
+Some(t) => Ok(CPSBox::<TakeCmd>::new(1, t)),
+None => AssertionError::fail(x.location(), "SharedHandle", format!("{x}")),
+}
+}
+fn is_taken_error(x: Expr) -> Inert<bool> {
+Inert(x.downcast::<Inert<SealedOrTaken>>().is_ok())
 }
 trait_set! {
 /// The part of processing a blocking I/O task that cannot be done on a remote
 /// thread, eg. because it accesses other systems or Orchid code.
-trait NonSendFn = FnOnce(Box<dyn Any + Send>, SeqScheduler) -> Vec<ExprInst>;
+trait NonSendFn = FnOnce(Box<dyn Any + Send>, SeqScheduler) -> Vec<Expr>;
 }
 struct SyncReply {
@@ -195,10 +212,10 @@ impl SeqScheduler {
 pub fn schedule<T: Send + 'static, U: Send + 'static>(
 &self,
 handle: SharedHandle<T>,
-operation: impl FnOnce(T, Canceller) -> (T, U) + Send + 'static,
-handler: impl FnOnce(T, U, Canceller) -> HandlerRes<T> + Send + 'static,
+operation: impl FnOnce(T, CancelFlag) -> (T, U) + Send + 'static,
+handler: impl FnOnce(T, U, CancelFlag) -> HandlerRes<T> + Send + 'static,
 early_cancel: impl FnOnce(T) -> HandlerRes<T> + Send + 'static,
-) -> Result<Canceller, SealedOrTaken> {
+) -> Result<CancelFlag, SealedOrTaken> {
 take_with_output(&mut *handle.0.lock().unwrap(), {
 let handle = handle.clone();
 |state| {
@@ -211,7 +228,7 @@ impl SeqScheduler {
 }
 },
 SharedResource::Free(t) => {
-let cancelled = Canceller::new();
+let cancelled = CancelFlag::new();
 drop(early_cancel); // cannot possibly be useful
 let op_erased: SyncOperation<T> = Box::new(|t, c| {
 let (t, u) = operation(t, c);
@@ -229,10 +246,10 @@ impl SeqScheduler {
 /// without queuing on any particular data.
 pub fn run_orphan<T: Send + 'static>(
 &self,
-operation: impl FnOnce(Canceller) -> T + Send + 'static,
-handler: impl FnOnce(T, Canceller) -> Vec<ExprInst> + 'static,
-) -> Canceller {
-let cancelled = Canceller::new();
+operation: impl FnOnce(CancelFlag) -> T + Send + 'static,
+handler: impl FnOnce(T, CancelFlag) -> Vec<Expr> + 'static,
+) -> CancelFlag {
+let cancelled = CancelFlag::new();
 let canc1 = cancelled.clone();
 let opid = self.0.pending.borrow_mut().insert(Box::new(
 |data: Box<dyn Any + Send>, _| {
@@ -252,8 +269,8 @@ impl SeqScheduler {
 pub fn seal<T>(
 &self,
 handle: SharedHandle<T>,
-seal: impl FnOnce(T) -> Vec<ExprInst> + Sync + Send + 'static,
-) -> Result<Vec<ExprInst>, SealedOrTaken> {
+seal: impl FnOnce(T) -> Vec<Expr> + Sync + Send + 'static,
+) -> Result<Vec<Expr>, SealedOrTaken> {
 take_with_output(&mut *handle.0.lock().unwrap(), |state| match state {
 SharedResource::Busy(mut b) if !b.is_sealed() => {
 b.seal(seal);
@@ -274,7 +291,7 @@ impl SeqScheduler {
 &self,
 t: T,
 handle: SharedHandle<T>,
-cancelled: Canceller,
+cancelled: CancelFlag,
 operation: SyncOperation<T>,
 ) {
 // referenced by self until run, references handle
@@ -315,33 +332,29 @@ impl SeqScheduler {
 }
 impl IntoSystem<'static> for SeqScheduler {
-fn into_system(self, i: &crate::Interner) -> crate::facade::System<'static> {
+fn into_system(self) -> System<'static> {
 let mut handlers = HandlerTable::new();
-handlers.register(|cmd: Box<CPSBox<Canceller>>| {
+handlers.register(|cmd: &CPSBox<CancelFlag>| {
 let (canceller, cont) = cmd.unpack1();
 canceller.cancel();
-Ok(cont)
+cont
 });
-handlers.register(move |cmd: Box<CPSBox<TakeCmd>>| {
+handlers.register(move |cmd: &CPSBox<TakeCmd>| {
 let (TakeCmd(cb), cont) = cmd.unpack1();
 cb(self.clone());
-Ok(cont)
+cont
 });
 System {
-name: ["system", "scheduler"].into_iter().map_into().collect(),
+name: "system::scheduler",
 prelude: Vec::new(),
-code: HashMap::new(),
+code: DeclTree::empty(),
 handlers,
 lexer_plugins: vec![],
 line_parsers: vec![],
-constants: ConstTree::namespace(
-[i.i("system"), i.i("scheduler")],
-ConstTree::tree([
-(i.i("is_taken_error"), ConstTree::xfn(xfn_1ary(is_taken_error))),
-(i.i("take_and_drop"), ConstTree::xfn(xfn_1ary(take_and_drop))),
-]),
-)
-.unwrap_tree(),
+constants: ConstTree::ns("system::scheduler", [ConstTree::tree([
+("is_taken_error", atom_leaf(xfn_1ary(is_taken_error))),
+("take_and_drop", atom_leaf(xfn_1ary(take_and_drop))),
+])]),
 }
 }
 }
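
Conceptually, `SeqScheduler` queues mutations of a shared resource so that only one runs at a time on a worker thread, with the results handed back afterwards. The sketch below shows only that underlying idea with a channel and a single worker thread; it does not use the crate's `SharedHandle`, `CancelFlag` or handler types.

```
use std::sync::mpsc;
use std::thread;

// Each job gets exclusive access to the resource on the worker thread, so the
// mutations are applied strictly one after another, like SeqScheduler's queue.
type Job = Box<dyn FnOnce(&mut Vec<u8>) + Send>;

fn main() {
  let (tx, rx) = mpsc::channel::<Job>();
  let worker = thread::spawn(move || {
    let mut resource = Vec::new(); // stand-in for a Sink/Source behind a SharedHandle
    for job in rx {
      job(&mut resource);
    }
    resource
  });
  tx.send(Box::new(|buf: &mut Vec<u8>| buf.extend_from_slice(b"hello "))).unwrap();
  tx.send(Box::new(|buf: &mut Vec<u8>| buf.extend_from_slice(b"world"))).unwrap();
  drop(tx); // closing the channel lets the worker drain and stop, like sealing a handle
  assert_eq!(worker.join().unwrap(), b"hello world".to_vec());
}
```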


@@ -1,6 +1,8 @@
 //! A thread pool for executing tasks in parallel, spawning threads as workload
 //! increases and terminating them as tasks finish. This is not terribly
 //! efficient, its main design goal is to parallelize blocking I/O calls.
+//!
+//! This is the abstract implementation of the scheduler.
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::mpsc::{sync_channel, SyncSender};
@@ -81,7 +83,7 @@ struct ThreadPoolData<T: Task> {
 /// arrive. To get rid of the last waiting thread, drop the thread pool.
 ///
 /// ```
-/// use orchidlang::thread_pool::{Task, ThreadPool};
+/// use orchidlang::libs::scheduler::thread_pool::{Task, ThreadPool};
 ///
 /// struct MyTask(&'static str);
 /// impl Task for MyTask {


@@ -1,6 +1,8 @@
+//! Error produced by numeric operations
 use std::fmt::Display;
-use crate::foreign::ExternError;
+use crate::foreign::error::ExternError;
 /// Various errors produced by arithmetic operations
 #[derive(Clone)]

src/libs/std/binary.rs (new file, 160 lines)

@@ -0,0 +1,160 @@
//! `std::binary` Operations on binary buffers.
use std::fmt::Debug;
use std::ops::Deref;
use std::sync::Arc;
use itertools::Itertools;
use super::runtime_error::RuntimeError;
use crate::foreign::atom::Atomic;
use crate::foreign::error::ExternResult;
use crate::foreign::fn_bridge::constructors::{
xfn_1ary, xfn_2ary, xfn_3ary, xfn_4ary,
};
use crate::foreign::inert::{Inert, InertPayload};
use crate::gen::tree::{atom_leaf, ConstTree};
use crate::interpreter::nort::Clause;
use crate::utils::iter_find::iter_find;
use crate::utils::unwrap_or::unwrap_or;
const INT_BYTES: usize = usize::BITS as usize / 8;
/// A block of binary data
#[derive(Clone, Hash, PartialEq, Eq)]
pub struct Binary(pub Arc<Vec<u8>>);
impl InertPayload for Binary {
const TYPE_STR: &'static str = "a binary blob";
}
impl Deref for Binary {
type Target = Vec<u8>;
fn deref(&self) -> &Self::Target { &self.0 }
}
impl Debug for Binary {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut iter = self.0.iter().copied();
f.write_str("Binary")?;
for mut chunk in iter.by_ref().take(32).chunks(4).into_iter() {
let a = chunk.next().expect("Chunks cannot be empty");
let b = unwrap_or!(chunk.next(); return write!(f, "{a:02x}"));
let c = unwrap_or!(chunk.next(); return write!(f, "{a:02x}{b:02x}"));
let d =
unwrap_or!(chunk.next(); return write!(f, "{a:02x}{b:02x}{c:02x}"));
write!(f, "{a:02x}{b:02x}{c:02x}{d:02x}")?
}
if iter.next().is_some() { write!(f, "...") } else { Ok(()) }
}
}
/// Append two binary data blocks
pub fn concatenate(a: Inert<Binary>, b: Inert<Binary>) -> Inert<Binary> {
let data = (*a).iter().chain(b.0.0.iter()).copied().collect();
Inert(Binary(Arc::new(data)))
}
/// Extract a subsection of the binary data
pub fn slice(
s: Inert<Binary>,
i: Inert<usize>,
len: Inert<usize>,
) -> ExternResult<Inert<Binary>> {
if i.0 + len.0 < s.0.0.len() {
RuntimeError::fail(
"Byte index out of bounds".to_string(),
"indexing binary",
)?
}
Ok(Inert(Binary(Arc::new(s.0.0[i.0..i.0 + len.0].to_vec()))))
}
/// Return the index where the first argument first contains the second, if any
pub fn find(haystack: Inert<Binary>, needle: Inert<Binary>) -> Option<Clause> {
let found = iter_find(haystack.0.0.iter(), needle.0.0.iter());
found.map(|i| Inert(i).atom_cls())
}
/// Split binary data block into two smaller blocks
pub fn split(
bin: Inert<Binary>,
i: Inert<usize>,
) -> ExternResult<(Inert<Binary>, Inert<Binary>)> {
if bin.0.0.len() < i.0 {
RuntimeError::fail(
"Byte index out of bounds".to_string(),
"splitting binary",
)?
}
let (asl, bsl) = bin.0.0.split_at(i.0);
Ok((
Inert(Binary(Arc::new(asl.to_vec()))),
Inert(Binary(Arc::new(bsl.to_vec()))),
))
}
/// Read a number from a binary blob
pub fn get_num(
buf: Inert<Binary>,
loc: Inert<usize>,
size: Inert<usize>,
is_le: Inert<bool>,
) -> ExternResult<Inert<usize>> {
if buf.0.0.len() < (loc.0 + size.0) {
RuntimeError::fail(
"section out of range".to_string(),
"reading number from binary data",
)?
}
if INT_BYTES < size.0 {
RuntimeError::fail(
"more than std::bin::int_bytes bytes provided".to_string(),
"reading number from binary data",
)?
}
let mut data = [0u8; INT_BYTES];
let section = &buf.0.0[loc.0..(loc.0 + size.0)];
let num = if is_le.0 {
data[0..size.0].copy_from_slice(section);
usize::from_le_bytes(data)
} else {
data[INT_BYTES - size.0..].copy_from_slice(section);
usize::from_be_bytes(data)
};
Ok(Inert(num))
}
/// Convert a number into a blob
pub fn from_num(
size: Inert<usize>,
is_le: Inert<bool>,
data: Inert<usize>,
) -> ExternResult<Inert<Binary>> {
if INT_BYTES < size.0 {
RuntimeError::fail(
"more than std::bin::int_bytes bytes requested".to_string(),
"converting number to binary",
)?
}
let bytes = match is_le.0 {
true => data.0.to_le_bytes()[0..size.0].to_vec(),
false => data.0.to_be_bytes()[8 - size.0..].to_vec(),
};
Ok(Inert(Binary(Arc::new(bytes))))
}
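
Both `get_num` and `from_num` pad a short slice into a native-width buffer before converting; a standalone round trip of that padding scheme, here assuming a 2-byte little-endian field:

```
const INT_BYTES: usize = usize::BITS as usize / 8;

// Round-trip a value through a 2-byte little-endian encoding, using the same
// padding approach as get_num/from_num for widths below the native size.
fn main() {
  let value: usize = 0x1234;
  let le_bytes = value.to_le_bytes()[0..2].to_vec();
  assert_eq!(le_bytes, vec![0x34, 0x12]);

  let mut padded = [0u8; INT_BYTES];
  padded[0..2].copy_from_slice(&le_bytes);
  assert_eq!(usize::from_le_bytes(padded), value);
}
```
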
/// Detect the number of bytes in the blob
pub fn size(b: Inert<Binary>) -> Inert<usize> { Inert(b.0.len()) }
pub(super) fn bin_lib() -> ConstTree {
ConstTree::ns("std::binary", [ConstTree::tree([
("concat", atom_leaf(xfn_2ary(concatenate))),
("slice", atom_leaf(xfn_3ary(slice))),
("find", atom_leaf(xfn_2ary(find))),
("split", atom_leaf(xfn_2ary(split))),
("get_num", atom_leaf(xfn_4ary(get_num))),
("from_num", atom_leaf(xfn_3ary(from_num))),
("size", atom_leaf(xfn_1ary(size))),
("int_bytes", atom_leaf(Inert(INT_BYTES))),
])])
}

src/libs/std/bool.orc (new file, 46 lines)

@@ -0,0 +1,46 @@
import std::(pmatch, inspect)
export ::(!=, ==)
export const not := \bool. if bool then false else true
macro ...$a != ...$b =0x3p36=> (not (...$a == ...$b))
macro ...$a == ...$b =0x3p36=> (equals (...$a) (...$b))
export macro ...$a and ...$b =0x4p36=> (ifthenelse (...$a) (...$b) false)
export macro ...$a or ...$b =0x4p36=> (ifthenelse (...$a) true (...$b))
export macro if ...$cond then ...$true else ...$false:1 =0x1p84=> (
ifthenelse (...$cond) (...$true) (...$false)
)
(
macro pmatch::request (== ...$other)
=0x1p230=> pmatch::response (
if pmatch::value == (...$other)
then pmatch::pass
else pmatch::fail
)
( pmatch::no_binds )
)
(
macro pmatch::request (!= ...$other)
=0x1p230=> pmatch::response (
if pmatch::value != (...$other)
then pmatch::pass
else pmatch::fail
)
( pmatch::no_binds )
)
(
macro pmatch::request (true)
=0x1p230=> pmatch::response
(if pmatch::value then pmatch::pass else pmatch::fail)
( pmatch::no_binds )
)
(
macro pmatch::request (false)
=0x1p230=> pmatch::response
(if pmatch::value then pmatch::fail else pmatch::pass)
( pmatch::no_binds )
)

src/libs/std/bool.rs (new file, 53 lines)

@@ -0,0 +1,53 @@
use super::number::Numeric;
use super::string::OrcString;
use crate::foreign::error::{AssertionError, ExternResult};
use crate::foreign::fn_bridge::constructors::{xfn_1ary, xfn_2ary};
use crate::foreign::inert::Inert;
use crate::foreign::try_from_expr::WithLoc;
use crate::gen::tpl;
use crate::gen::traits::{Gen, GenClause};
use crate::gen::tree::{atom_leaf, ConstTree};
use crate::interpreter::gen_nort::nort_gen;
use crate::interpreter::nort_builder::NortBuilder;
use crate::interpreter::nort::Expr;
const fn left() -> impl GenClause { tpl::L("l", tpl::L("_", tpl::P("l"))) }
const fn right() -> impl GenClause { tpl::L("_", tpl::L("r", tpl::P("r"))) }
/// Takes a boolean and two branches, runs the first if the bool is true, the
/// second if it's false.
// Even though it's a ternary function, IfThenElse is implemented as a unary
// foreign function, as the rest of the logic can be defined in Orchid.
pub fn if_then_else(WithLoc(loc, b): WithLoc<Inert<bool>>) -> Expr {
let ctx = nort_gen(loc);
if b.0 { left().template(ctx, []) } else { right().template(ctx, []) }
}
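
`left()` and `right()` generate the Orchid selectors `\l.\_.l` and `\_.\r.r`, so `ifthenelse` only has to return the appropriate selector and Orchid applies the two branches to it. A rough Rust analogue of that selector shape, using plain closures and without Orchid's laziness:

```
// The "function" returned for a boolean takes both branches and returns one of
// them untouched, which is all the Orchid templates above do.
fn selector<T>(b: bool) -> impl Fn(T, T) -> T {
  move |on_true, on_false| if b { on_true } else { on_false }
}

fn main() {
  assert_eq!(selector(true)("left", "right"), "left");
  assert_eq!(selector(false)("left", "right"), "right");
}
```
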
/// Compares the inner values if
///
/// - both are string,
/// - both are bool,
/// - both are either uint or num
pub fn equals(
WithLoc(loc, a): WithLoc<Expr>,
b: Expr,
) -> ExternResult<Inert<bool>> {
Ok(Inert(if let Ok(l) = a.clone().downcast::<Inert<OrcString>>() {
b.downcast::<Inert<OrcString>>().is_ok_and(|r| *l == *r)
} else if let Ok(l) = a.clone().downcast::<Inert<bool>>() {
b.downcast::<Inert<bool>>().is_ok_and(|r| *l == *r)
} else if let Some(l) = a.clause.request::<Numeric>() {
b.clause.request::<Numeric>().is_some_and(|r| l.as_float() == r.as_float())
} else {
AssertionError::fail(loc, "string, bool or numeric", format!("{a}"))?
}))
}
pub fn bool_lib() -> ConstTree {
ConstTree::ns("std::bool", [ConstTree::tree([
("ifthenelse", atom_leaf(xfn_1ary(if_then_else))),
("equals", atom_leaf(xfn_2ary(equals))),
("true", atom_leaf(Inert(true))),
("false", atom_leaf(Inert(false))),
])])
}

src/libs/std/conv.rs (new file, 71 lines)

@@ -0,0 +1,71 @@
use once_cell::sync::Lazy;
use ordered_float::NotNan;
use super::number::Numeric;
use super::protocol::{gen_resolv, Protocol};
use super::string::OrcString;
use crate::foreign::atom::Atomic;
use crate::foreign::error::{AssertionError, ExternResult};
use crate::foreign::fn_bridge::constructors::xfn_1ary;
use crate::foreign::inert::Inert;
use crate::foreign::try_from_expr::WithLoc;
use crate::gen::tpl;
use crate::gen::traits::Gen;
use crate::gen::tree::{atom_leaf, ConstTree};
use crate::interpreter::gen_nort::nort_gen;
use crate::interpreter::nort_builder::NortBuilder;
use crate::interpreter::nort::{ClauseInst, Expr};
use crate::parse::numeric::parse_num;
pub static TO_STRING: Lazy<Protocol> =
Lazy::new(|| Protocol::new("to_string", []));
fn to_numeric(WithLoc(loc, a): WithLoc<ClauseInst>) -> ExternResult<Numeric> {
if let Some(n) = a.request::<Numeric>() {
return Ok(n);
}
if let Some(s) = a.request::<OrcString>() {
return parse_num(s.as_str()).map_err(|e| {
AssertionError::ext(loc, "number syntax", format!("{e:?}"))
});
}
AssertionError::fail(loc, "string or number", format!("{a}"))
}
/// parse a number. Accepts the same syntax Orchid does.
pub fn to_float(a: WithLoc<ClauseInst>) -> ExternResult<Inert<NotNan<f64>>> {
to_numeric(a).map(|n| Inert(n.as_float()))
}
/// Parse an unsigned integer. Accepts the same formats Orchid does. If the
/// input is a number, floors it.
pub fn to_uint(a: WithLoc<ClauseInst>) -> ExternResult<Inert<usize>> {
to_numeric(a).map(|n| match n {
Numeric::Float(f) => Inert(f.floor() as usize),
Numeric::Uint(i) => Inert(i),
})
}
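
A two-line reminder of the flooring behaviour documented above, with an ad-hoc helper standing in for the `Numeric::Float` arm:

```
fn floor_to_uint(n: f64) -> usize { n.floor() as usize }

fn main() {
  assert_eq!(floor_to_uint(2.9), 2); // floored, not rounded
  assert_eq!(floor_to_uint(3.0), 3);
}
```
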
/// Convert a literal to a string using Rust's conversions for floats, chars and
/// uints respectively
pub fn to_string(WithLoc(loc, a): WithLoc<Expr>) -> Expr {
match a.clone().downcast::<Inert<OrcString>>() {
Ok(str) => str.atom_expr(loc),
Err(_) => match a.clause.request::<OrcString>() {
Some(str) => Inert(str).atom_expr(loc),
None => tpl::a2(gen_resolv("std::to_string"), tpl::Slot, tpl::Slot)
.template(nort_gen(loc), [a.clone(), a]),
},
}
}
pub fn conv_lib() -> ConstTree {
ConstTree::ns("std", [ConstTree::tree([
TO_STRING.as_tree_ent([]),
ConstTree::tree_ent("conv", [
("to_float", atom_leaf(xfn_1ary(to_float))),
("to_uint", atom_leaf(xfn_1ary(to_uint))),
("to_string", atom_leaf(xfn_1ary(to_string))),
]),
])])
}


@@ -0,0 +1,86 @@
use std::collections::VecDeque;
use std::iter;
use std::rc::Rc;
use crate::foreign::atom::Atomic;
use crate::foreign::fn_bridge::constructors::xfn_1ary;
use crate::foreign::process::Unstable;
use crate::foreign::to_clause::ToClause;
use crate::foreign::try_from_expr::TryFromExpr;
use crate::location::{CodeLocation, SourceRange};
use crate::parse::parsed::{self, PType};
use crate::utils::pure_seq::pushed;
pub trait DeferredRuntimeCallback<T, U, R: ToClause>:
Fn(Vec<(T, U)>) -> R + Clone + Send + 'static
{
}
impl<T, U, R: ToClause, F: Fn(Vec<(T, U)>) -> R + Clone + Send + 'static>
DeferredRuntimeCallback<T, U, R> for F
{
}
/// Lazy-recursive function that takes the next value from the interpreter
/// and acts upon it
///
/// # Panics
///
/// If the list of remaining keys is empty
fn table_receiver_rec<
T: Clone + Send + 'static,
U: TryFromExpr + Clone + Send + 'static,
R: ToClause + 'static,
>(
range: SourceRange,
results: Vec<(T, U)>,
mut remaining_keys: VecDeque<T>,
callback: impl DeferredRuntimeCallback<T, U, R>,
) -> impl Atomic {
let t = remaining_keys.pop_front().expect("empty handled elsewhere");
xfn_1ary(move |u: U| {
let results = pushed(results, (t, u));
match remaining_keys.is_empty() {
true => callback(results).to_clause(CodeLocation::Source(range)),
false =>
table_receiver_rec(range, results, remaining_keys, callback).atom_cls(),
}
})
}
fn table_receiver<
T: Clone + Send + 'static,
U: TryFromExpr + Clone + Send + 'static,
R: ToClause + 'static,
>(
range: SourceRange,
keys: VecDeque<T>,
callback: impl DeferredRuntimeCallback<T, U, R>,
) -> parsed::Clause {
if keys.is_empty() {
Unstable::new(move |_| callback(Vec::new())).ast_cls()
} else {
Unstable::new(move |_| {
table_receiver_rec(range, Vec::new(), keys, callback).atom_cls()
})
.ast_cls()
}
}
/// Defers the execution of the callback to runtime, allowing it to depend on
//! the result of Orchid expressions.
pub fn defer_to_runtime<
T: Clone + Send + 'static,
U: TryFromExpr + Clone + Send + 'static,
R: ToClause + 'static,
>(
range: SourceRange,
pairs: impl IntoIterator<Item = (T, Vec<parsed::Expr>)>,
callback: impl DeferredRuntimeCallback<T, U, R>,
) -> parsed::Clause {
let (keys, ast_values) =
pairs.into_iter().unzip::<_, _, VecDeque<_>, Vec<_>>();
let items = iter::once(table_receiver(range.clone(), keys, callback)).chain(
ast_values.into_iter().map(|v| parsed::Clause::S(PType::Par, Rc::new(v))),
);
parsed::Clause::s('(', items, range)
}
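
The accumulation trick behind `table_receiver_rec`, collecting one value per key and firing the callback once the key queue empties, can be sketched without the crate's `Atomic`/`xfn` machinery as a small state machine; all names below are invented for the illustration:

```
use std::collections::VecDeque;

/// Collects one value per key, then hands the completed table to the callback.
/// Each `feed` consumes one pending key, like one recursion step above.
enum Receiver<K, V, R, F: FnOnce(Vec<(K, V)>) -> R> {
  Waiting { keys: VecDeque<K>, results: Vec<(K, V)>, callback: F },
  Done(R),
}

impl<K, V, R, F: FnOnce(Vec<(K, V)>) -> R> Receiver<K, V, R, F> {
  fn new(keys: VecDeque<K>, callback: F) -> Self {
    Self::Waiting { keys, results: Vec::new(), callback }
  }
  fn feed(self, value: V) -> Self {
    match self {
      Self::Done(_) => panic!("already complete"),
      Self::Waiting { mut keys, mut results, callback } => {
        let key = keys.pop_front().expect("empty handled elsewhere");
        results.push((key, value));
        if keys.is_empty() {
          Self::Done(callback(results))
        } else {
          Self::Waiting { keys, results, callback }
        }
      },
    }
  }
}

fn main() {
  let keys: VecDeque<&str> = ["x", "y"].into_iter().collect();
  let recv = Receiver::new(keys, |pairs| pairs.len());
  match recv.feed(1).feed(2) {
    Receiver::Done(n) => assert_eq!(n, 2),
    Receiver::Waiting { .. } => unreachable!(),
  }
}
```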


@@ -0,0 +1,41 @@
//! `std::exit_status` Exit status of a program or effectful subprogram.
//!
//! There is no support for custom codes, and the success/failure state can be
//! inspected.
use std::process::ExitCode;
use crate::foreign::fn_bridge::constructors::xfn_1ary;
use crate::foreign::inert::{Inert, InertPayload};
use crate::gen::tree::{atom_leaf, ConstTree};
/// An Orchid equivalent to Rust's binary exit status model
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ExitStatus {
/// unix exit code 0
Success,
/// unix exit code 1
Failure,
}
impl ExitStatus {
/// Convert to Rust-land [ExitCode]
pub fn code(self) -> ExitCode {
match self {
Self::Success => ExitCode::SUCCESS,
Self::Failure => ExitCode::FAILURE,
}
}
}
impl InertPayload for ExitStatus {
const TYPE_STR: &'static str = "ExitStatus";
}
pub(super) fn exit_status_lib() -> ConstTree {
let is_success = |es: Inert<ExitStatus>| Inert(es.0 == ExitStatus::Success);
ConstTree::ns("std::exit_status", [ConstTree::tree([
("success", atom_leaf(Inert(ExitStatus::Success))),
("failure", atom_leaf(Inert(ExitStatus::Failure))),
("is_success", atom_leaf(xfn_1ary(is_success))),
])])
}


@@ -1,7 +1,6 @@
 import super::known::*
-import super::match::*
+import super::pmatch::*
 import super::macro
-import super::match::(match, =>)
 --[ Do nothing. Especially useful as a passive cps operation ]--
 export const identity := \x.x

src/libs/std/inspect.rs (new file, 39 lines)

@@ -0,0 +1,39 @@
use std::fmt::Debug;
use crate::foreign::atom::{Atomic, AtomicResult, AtomicReturn};
use crate::foreign::error::ExternResult;
use crate::foreign::fn_bridge::constructors::xfn_1ary;
use crate::foreign::to_clause::ToClause;
use crate::gen::tree::{atom_leaf, ConstTree};
use crate::interpreter::apply::CallData;
use crate::interpreter::nort::{Clause, ClauseInst, Expr};
use crate::interpreter::run::RunData;
use crate::utils::ddispatch::Responder;
#[derive(Debug, Clone)]
struct Inspect;
impl Responder for Inspect {}
impl Atomic for Inspect {
fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
fn as_any_ref(&self) -> &dyn std::any::Any { self }
fn redirect(&mut self) -> Option<&mut ClauseInst> { None }
fn run(self: Box<Self>, run: RunData) -> AtomicResult {
AtomicReturn::inert(*self, run.ctx)
}
fn apply_ref(&self, call: CallData) -> ExternResult<Clause> {
eprintln!("{}", call.arg);
Ok(call.arg.to_clause(call.location))
}
}
fn tee(x: Expr) -> Expr {
eprintln!("{x}");
x
}
pub fn inspect_lib() -> ConstTree {
ConstTree::ns("std", [ConstTree::tree([
("inspect", atom_leaf(Inspect)),
("tee", atom_leaf(xfn_1ary(tee))),
])])
}

src/libs/std/list.orc (new file, 137 lines)

@@ -0,0 +1,137 @@
import super::(option, tuple, tuple::t, panic, pmatch, pmatch::=>, macro, tee)
import super::(functional::*, procedural::*)
import super::(loop::*, bool::*, known::*, number::*)
as_type list ()
export const cons := \hd. \tl. wrap (option::some t[hd, unwrap tl])
export const end := wrap option::none
export const pop := \list. \default. \f. (
pmatch::match (unwrap list) {
option::none => default;
option::some t[hd, tl] => f hd (wrap tl);
}
)
-- Operators
--[ Fold each element into an accumulator using an `acc -> el -> acc`. #eager ]--
export const fold := \list. \acc. \f. (
loop_over (list, acc) {
cps head, list = pop list acc;
let acc = f acc head;
}
)
--[ Fold each element into an accumulator in reverse order. #eager-notail ]--
export const rfold := \list. \acc. \f. (
recursive r (list)
pop list acc \head. \tail.
f (r tail) head
)
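
The `#eager` versus `#eager-notail` tags mark the difference between a tail-recursive fold, which can run as a loop, and `rfold`, which must return from the recursion before combining. A Rust rendering of that distinction:

```
// fold consumes the list front-to-back with an accumulator (a plain loop),
// while rfold recurses to the end first and combines on the way back.
fn fold<T, A>(items: &[T], mut acc: A, f: impl Fn(A, &T) -> A) -> A {
  for item in items {
    acc = f(acc, item); // tail position: nothing left to do after the call
  }
  acc
}

fn rfold<T, A>(items: &[T], acc: A, f: &impl Fn(A, &T) -> A) -> A {
  match items.split_first() {
    None => acc,
    // not a tail call: f still runs after the recursive call returns
    Some((head, tail)) => f(rfold(tail, acc, f), head),
  }
}

fn main() {
  let xs = [1, 2, 3];
  assert_eq!(fold(&xs, 0, |a, x| a * 10 + x), 123);
  assert_eq!(rfold(&xs, 0, &|a, x| a * 10 + x), 321);
}
```
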
--[ Reverse a list. #eager ]--
export const reverse := \list. fold list end \tl. \hd. cons hd tl
--[ Fold each element into a shared element with an `el -> el -> el`. #eager-notail ]--
export const reduce := \list. \f. do{
cps head, list = pop list option::none;
option::some $ fold list head f
}
--[
Return a new list that contains only the elements from the input list
for which the function returns true. #lazy
]--
export const filter := \list. \f. (
pop list end \head. \tail.
if (f head)
then cons head (filter tail f)
else filter tail f
)
--[ Transform each element of the list with an `el -> any`. #lazy ]--
export const map := \list. \f. (
recursive r (list)
pop list end \head. \tail.
cons (f head) (r tail)
)
--[ Skip `n` elements from the list and return the tail. #lazy ]--
export const skip := \foo. \n. (
loop_over (foo, n) {
cps _head, foo = if n <= 0
then return foo
else pop foo end;
let n = n - 1;
}
)
--[ Return `n` elements from the list and discard the rest. #lazy ]--
export const take := \list. \n. (
recursive r (list, n)
if n == 0
then end
else pop list end \head. \tail.
cons head $ r tail $ n - 1
)
--[ Return the `n`th element from the list. #eager ]--
export const get := \list. \n. (
loop_over (list, n) {
cps head, list = pop list option::none;
cps if n == 0
then return (option::some head)
else identity;
let n = n - 1;
}
)
--[ Map every element to a pair of the index and the original element. #lazy ]--
export const enumerate := \list. (
recursive r (list, n = 0)
pop list end \head. \tail.
cons t[n, head] $ r tail $ n + 1
)
--[
Turn a list of CPS commands into a sequence. This is achieved by calling every
element on the return value of the next element with the tail passed to it.
The continuation is passed to the very last argument. #lazy
]--
export const chain := \list. \cont. loop_over (list) {
cps head, list = pop list cont;
cps head;
}
macro new[..$items] =0x2p84=> mk_list macro::comma_list (..$items)
macro mk_list ( macro::list_item $item $tail ) =0x1p254=> (cons $item mk_list $tail)
macro mk_list macro::list_end =0x1p254=> end
export ::(new)
( macro pmatch::request (cons $head $tail)
=0x1p230=> await_subpatterns
(pmatch::request ($head))
(pmatch::request ($tail))
)
( macro await_subpatterns
(pmatch::response $h_expr ( $h_binds ))
(pmatch::response $t_expr ( $t_binds ))
=0x1p230=> pmatch::response (
pop
pmatch::value
pmatch::fail
\head. \tail. (
(\pmatch::pass. (\pmatch::value. $h_expr) head)
(pmatch::take_binds $h_binds (
(\pmatch::pass. (\pmatch::value. $t_expr) tail)
(pmatch::take_binds $t_binds (
pmatch::give_binds pmatch::chain_binds $h_binds $t_binds pmatch::pass
))
))
)
)
(pmatch::chain_binds $h_binds $t_binds)
)


@@ -37,7 +37,6 @@ macro parse_binds (...$item) =0x1p250=> (
 ()
 )
--- while loop
 export macro statement (
 while ..$condition (..$binds) {

Some files were not shown because too many files have changed in this diff.