Updated everything and moved to hard tab indentation

This commit is contained in:
2025-01-08 19:20:34 +01:00
parent 7cdfe7e3c4
commit 52c8d1c95a
100 changed files with 5949 additions and 5998 deletions

View File

@@ -8,7 +8,7 @@ root = true
end_of_line = lf end_of_line = lf
insert_final_newline = true insert_final_newline = true
charset = utf-8 charset = utf-8
indent_style = space indent_style = tab
indent_size = 2 indent_size = 2
[Makefile] [Makefile]

426
Cargo.lock generated
View File

@@ -28,15 +28,15 @@ dependencies = [
[[package]] [[package]]
name = "allocator-api2" name = "allocator-api2"
version = "0.2.16" version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]] [[package]]
name = "anstream" name = "anstream"
version = "0.6.15" version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"anstyle-parse", "anstyle-parse",
@@ -49,33 +49,33 @@ dependencies = [
[[package]] [[package]]
name = "anstyle" name = "anstyle"
version = "1.0.8" version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
[[package]] [[package]]
name = "anstyle-parse" name = "anstyle-parse"
version = "0.2.5" version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
dependencies = [ dependencies = [
"utf8parse", "utf8parse",
] ]
[[package]] [[package]]
name = "anstyle-query" name = "anstyle-query"
version = "1.1.1" version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
dependencies = [ dependencies = [
"windows-sys", "windows-sys",
] ]
[[package]] [[package]]
name = "anstyle-wincon" name = "anstyle-wincon"
version = "3.0.4" version = "3.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"windows-sys", "windows-sys",
@@ -83,15 +83,15 @@ dependencies = [
[[package]] [[package]]
name = "arrayvec" name = "arrayvec"
version = "0.7.4" version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.1.0" version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]] [[package]]
name = "bitvec" name = "bitvec"
@@ -116,9 +116,9 @@ dependencies = [
[[package]] [[package]]
name = "borsh" name = "borsh"
version = "1.5.1" version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03"
dependencies = [ dependencies = [
"borsh-derive", "borsh-derive",
"cfg_aliases", "cfg_aliases",
@@ -126,16 +126,15 @@ dependencies = [
[[package]] [[package]]
name = "borsh-derive" name = "borsh-derive"
version = "1.5.1" version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b" checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244"
dependencies = [ dependencies = [
"once_cell", "once_cell",
"proc-macro-crate", "proc-macro-crate",
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 2.0.52", "syn 2.0.95",
"syn_derive",
] ]
[[package]] [[package]]
@@ -155,22 +154,28 @@ version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 1.0.109", "syn 1.0.109",
] ]
[[package]] [[package]]
name = "bytes" name = "byteorder"
version = "1.6.1" version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
[[package]] [[package]]
name = "camino" name = "camino"
version = "1.1.7" version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
[[package]] [[package]]
name = "cfg-if" name = "cfg-if"
@@ -186,9 +191,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.4" version = "4.5.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" checksum = "9560b07a799281c7e0958b9296854d6fafd4c5f31444a7e5bb1ad6dde5ccf1bd"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive", "clap_derive",
@@ -196,51 +201,51 @@ dependencies = [
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.5.2" version = "4.5.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" checksum = "874e0dd3eb68bf99058751ac9712f622e61e6f393a94f7128fa26e3f02f5c7cd"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
"clap_lex", "clap_lex",
"strsim 0.11.1", "strsim",
] ]
[[package]] [[package]]
name = "clap_derive" name = "clap_derive"
version = "4.5.4" version = "4.5.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c"
dependencies = [ dependencies = [
"heck", "heck",
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 2.0.52", "syn 2.0.95",
] ]
[[package]] [[package]]
name = "clap_lex" name = "clap_lex"
version = "0.7.2" version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]] [[package]]
name = "colorchoice" name = "colorchoice"
version = "1.0.2" version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]] [[package]]
name = "const_panic" name = "const_panic"
version = "0.2.8" version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6051f239ecec86fde3410901ab7860d458d160371533842974fc61f96d15879b" checksum = "53857514f72ee4a2b583de67401e3ff63a5472ca4acf289d09a9ea7636dfec17"
[[package]] [[package]]
name = "cpufeatures" name = "cpufeatures"
version = "0.2.12" version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3"
dependencies = [ dependencies = [
"libc", "libc",
] ]
@@ -257,9 +262,9 @@ dependencies = [
[[package]] [[package]]
name = "darling" name = "darling"
version = "0.20.8" version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989"
dependencies = [ dependencies = [
"darling_core", "darling_core",
"darling_macro", "darling_macro",
@@ -267,27 +272,27 @@ dependencies = [
[[package]] [[package]]
name = "darling_core" name = "darling_core"
version = "0.20.8" version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5"
dependencies = [ dependencies = [
"fnv", "fnv",
"ident_case", "ident_case",
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"strsim 0.10.0", "strsim",
"syn 2.0.52", "syn 2.0.95",
] ]
[[package]] [[package]]
name = "darling_macro" name = "darling_macro"
version = "0.20.8" version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [ dependencies = [
"darling_core", "darling_core",
"quote 1.0.35", "quote 1.0.38",
"syn 2.0.52", "syn 2.0.95",
] ]
[[package]] [[package]]
@@ -319,9 +324,9 @@ checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125"
[[package]] [[package]]
name = "either" name = "either"
version = "1.10.0" version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
[[package]] [[package]]
name = "equivalent" name = "equivalent"
@@ -359,9 +364,9 @@ dependencies = [
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.2.14" version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
@@ -377,12 +382,6 @@ dependencies = [
"ahash 0.7.8", "ahash 0.7.8",
] ]
[[package]]
name = "hashbrown"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.15.2" version = "0.15.2"
@@ -408,12 +407,12 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "2.2.6" version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f"
dependencies = [ dependencies = [
"equivalent", "equivalent",
"hashbrown 0.14.5", "hashbrown 0.15.2",
] ]
[[package]] [[package]]
@@ -422,15 +421,6 @@ version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.14.0" version = "0.14.0"
@@ -442,15 +432,15 @@ dependencies = [
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.11" version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
[[package]] [[package]]
name = "konst" name = "konst"
version = "0.3.9" version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50a0ba6de5f7af397afff922f22c149ff605c766cd3269cf6c1cd5e466dbe3b9" checksum = "4381b9b00c55f251f2ebe9473aef7c117e96828def1a7cb3bd3f0f903c6894e9"
dependencies = [ dependencies = [
"const_panic", "const_panic",
"konst_kernel", "konst_kernel",
@@ -460,18 +450,18 @@ dependencies = [
[[package]] [[package]]
name = "konst_kernel" name = "konst_kernel"
version = "0.3.9" version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be0a455a1719220fd6adf756088e1c69a85bf14b6a9e24537a5cc04f503edb2b" checksum = "e4b1eb7788f3824c629b1116a7a9060d6e898c358ebff59070093d51103dcc3c"
dependencies = [ dependencies = [
"typewit", "typewit",
] ]
[[package]] [[package]]
name = "konst_proc_macros" name = "konst_proc_macros"
version = "0.3.0" version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e28ab1dc35e09d60c2b8c90d12a9a8d9666c876c10a3739a3196db0103b6043" checksum = "00af7901ba50898c9e545c24d5c580c96a982298134e8037d8978b6594782c07"
[[package]] [[package]]
name = "lazy_static" name = "lazy_static"
@@ -481,9 +471,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.153" version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]] [[package]]
name = "memchr" name = "memchr"
@@ -508,9 +498,9 @@ dependencies = [
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.19.0" version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]] [[package]]
name = "orchid-api" name = "orchid-api"
@@ -526,18 +516,18 @@ name = "orchid-api-derive"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"darling", "darling",
"itertools 0.13.0", "itertools",
"orchid-api-traits", "orchid-api-traits",
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 2.0.52", "syn 2.0.95",
] ]
[[package]] [[package]]
name = "orchid-api-traits" name = "orchid-api-traits"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"itertools 0.13.0", "itertools",
"never", "never",
"ordered-float", "ordered-float",
] ]
@@ -549,7 +539,7 @@ dependencies = [
"derive_destructure", "derive_destructure",
"dyn-clone", "dyn-clone",
"hashbrown 0.15.2", "hashbrown 0.15.2",
"itertools 0.14.0", "itertools",
"lazy_static", "lazy_static",
"never", "never",
"num-traits", "num-traits",
@@ -571,7 +561,7 @@ dependencies = [
"derive_destructure", "derive_destructure",
"dyn-clone", "dyn-clone",
"hashbrown 0.15.2", "hashbrown 0.15.2",
"itertools 0.14.0", "itertools",
"konst", "konst",
"lazy_static", "lazy_static",
"never", "never",
@@ -592,7 +582,7 @@ version = "0.1.0"
dependencies = [ dependencies = [
"derive_destructure", "derive_destructure",
"hashbrown 0.15.2", "hashbrown 0.15.2",
"itertools 0.14.0", "itertools",
"lazy_static", "lazy_static",
"never", "never",
"num-traits", "num-traits",
@@ -609,7 +599,7 @@ dependencies = [
name = "orchid-std" name = "orchid-std"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"itertools 0.13.0", "itertools",
"never", "never",
"once_cell", "once_cell",
"orchid-api", "orchid-api",
@@ -626,16 +616,16 @@ version = "0.1.0"
dependencies = [ dependencies = [
"camino", "camino",
"clap", "clap",
"itertools 0.13.0", "itertools",
"orchid-base", "orchid-base",
"orchid-host", "orchid-host",
] ]
[[package]] [[package]]
name = "ordered-float" name = "ordered-float"
version = "4.2.1" version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19ff2cf528c6c03d9ed653d6c4ce1dc0582dc4af309790ad92f07c1cd551b0be" checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951"
dependencies = [ dependencies = [
"num-traits", "num-traits",
] ]
@@ -648,42 +638,22 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]] [[package]]
name = "ppv-lite86" name = "ppv-lite86"
version = "0.2.17" version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
dependencies = [
"zerocopy",
]
[[package]] [[package]]
name = "proc-macro-crate" name = "proc-macro-crate"
version = "3.1.0" version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b"
dependencies = [ dependencies = [
"toml_edit", "toml_edit",
] ]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2 1.0.78",
"quote 1.0.35",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"version_check",
]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "0.4.30" version = "0.4.30"
@@ -695,9 +665,9 @@ dependencies = [
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.78" version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
@@ -717,8 +687,8 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 1.0.109", "syn 1.0.109",
] ]
@@ -733,11 +703,11 @@ dependencies = [
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.35" version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
] ]
[[package]] [[package]]
@@ -787,9 +757,9 @@ dependencies = [
[[package]] [[package]]
name = "rkyv" name = "rkyv"
version = "0.7.44" version = "0.7.45"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0" checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b"
dependencies = [ dependencies = [
"bitvec", "bitvec",
"bytecheck", "bytecheck",
@@ -805,20 +775,20 @@ dependencies = [
[[package]] [[package]]
name = "rkyv_derive" name = "rkyv_derive"
version = "0.7.44" version = "0.7.45"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65" checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 1.0.109", "syn 1.0.109",
] ]
[[package]] [[package]]
name = "rust-embed" name = "rust-embed"
version = "8.3.0" version = "8.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb78f46d0066053d16d4ca7b898e9343bc3530f71c61d5ad84cd404ada068745" checksum = "fa66af4a4fdd5e7ebc276f115e895611a34739a9c1c01028383d612d550953c0"
dependencies = [ dependencies = [
"rust-embed-impl", "rust-embed-impl",
"rust-embed-utils", "rust-embed-utils",
@@ -827,22 +797,22 @@ dependencies = [
[[package]] [[package]]
name = "rust-embed-impl" name = "rust-embed-impl"
version = "8.3.0" version = "8.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b91ac2a3c6c0520a3fb3dd89321177c3c692937c4eb21893378219da10c44fc8" checksum = "6125dbc8867951125eec87294137f4e9c2c96566e61bf72c45095a7c77761478"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"rust-embed-utils", "rust-embed-utils",
"syn 2.0.52", "syn 2.0.95",
"walkdir", "walkdir",
] ]
[[package]] [[package]]
name = "rust-embed-utils" name = "rust-embed-utils"
version = "8.3.0" version = "8.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86f69089032567ffff4eada41c573fc43ff466c7db7c5688b2e7969584345581" checksum = "2e5347777e9aacb56039b0e1f28785929a8a3b709e87482e7442c72e7c12529d"
dependencies = [ dependencies = [
"sha2", "sha2",
"walkdir", "walkdir",
@@ -850,9 +820,9 @@ dependencies = [
[[package]] [[package]]
name = "rust_decimal" name = "rust_decimal"
version = "1.35.0" version = "1.36.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1790d1c4c0ca81211399e0e0af16333276f375209e71a37b67698a373db5b47a" checksum = "b082d80e3e3cc52b2ed634388d436fe1f4de6af5786cc2de9ba9737527bdf555"
dependencies = [ dependencies = [
"arrayvec", "arrayvec",
"borsh", "borsh",
@@ -887,31 +857,32 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.204" version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.204" version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 2.0.52", "syn 2.0.95",
] ]
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.120" version = "1.0.135"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9"
dependencies = [ dependencies = [
"itoa", "itoa",
"memchr",
"ryu", "ryu",
"serde", "serde",
] ]
@@ -929,20 +900,14 @@ dependencies = [
[[package]] [[package]]
name = "simdutf8" name = "simdutf8"
version = "0.1.4" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
[[package]] [[package]]
name = "stdio-perftest" name = "stdio-perftest"
version = "0.1.0" version = "0.1.0"
[[package]]
name = "strsim"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]] [[package]]
name = "strsim" name = "strsim"
version = "0.11.1" version = "0.11.1"
@@ -972,34 +937,22 @@ version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"unicode-ident", "unicode-ident",
] ]
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.52" version = "2.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" checksum = "46f71c0377baf4ef1cc3e3402ded576dccc315800fbc62dfc7fe04b009773b4a"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "syn_derive"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b"
dependencies = [
"proc-macro-error",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.52",
]
[[package]] [[package]]
name = "tap" name = "tap"
version = "1.0.1" version = "1.0.1"
@@ -1008,9 +961,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]] [[package]]
name = "tinyvec" name = "tinyvec"
version = "1.8.0" version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8"
dependencies = [ dependencies = [
"tinyvec_macros", "tinyvec_macros",
] ]
@@ -1023,15 +976,15 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]] [[package]]
name = "toml_datetime" name = "toml_datetime"
version = "0.6.6" version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
[[package]] [[package]]
name = "toml_edit" name = "toml_edit"
version = "0.21.1" version = "0.22.22"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5"
dependencies = [ dependencies = [
"indexmap", "indexmap",
"toml_datetime", "toml_datetime",
@@ -1044,8 +997,8 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b79e2e9c9ab44c6d7c20d5976961b47e8f49ac199154daa514b77cd1ab536625" checksum = "b79e2e9c9ab44c6d7c20d5976961b47e8f49ac199154daa514b77cd1ab536625"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 1.0.109", "syn 1.0.109",
] ]
@@ -1057,9 +1010,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]] [[package]]
name = "typewit" name = "typewit"
version = "1.9.0" version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6fb9ae6a3cafaf0a5d14c2302ca525f9ae8e07a0f0e6949de88d882c37a6e24" checksum = "cb77c29baba9e4d3a6182d51fa75e3215c7fd1dab8f4ea9d107c716878e55fc0"
dependencies = [ dependencies = [
"typewit_proc_macros", "typewit_proc_macros",
] ]
@@ -1072,9 +1025,9 @@ checksum = "e36a83ea2b3c704935a01b4642946aadd445cea40b10935e3f8bd8052b8193d6"
[[package]] [[package]]
name = "unicode-ident" name = "unicode-ident"
version = "1.0.12" version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
@@ -1090,21 +1043,21 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]] [[package]]
name = "uuid" name = "uuid"
version = "1.10.0" version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a"
[[package]] [[package]]
name = "version_check" name = "version_check"
version = "0.9.4" version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]] [[package]]
name = "walkdir" name = "walkdir"
version = "2.4.0" version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [ dependencies = [
"same-file", "same-file",
"winapi-util", "winapi-util",
@@ -1116,42 +1069,20 @@ version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]] [[package]]
name = "winapi-util" name = "winapi-util"
version = "0.1.6" version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [ dependencies = [
"winapi", "windows-sys",
] ]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]] [[package]]
name = "windows-sys" name = "windows-sys"
version = "0.52.0" version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [ dependencies = [
"windows-targets", "windows-targets",
] ]
@@ -1222,9 +1153,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]] [[package]]
name = "winnow" name = "winnow"
version = "0.5.40" version = "0.6.22"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" checksum = "39281189af81c07ec09db316b302a3e67bf9bd7cbf6c820b50e35fee9c2fa980"
dependencies = [ dependencies = [
"memchr", "memchr",
] ]
@@ -1247,20 +1178,21 @@ dependencies = [
[[package]] [[package]]
name = "zerocopy" name = "zerocopy"
version = "0.7.32" version = "0.7.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
dependencies = [ dependencies = [
"byteorder",
"zerocopy-derive", "zerocopy-derive",
] ]
[[package]] [[package]]
name = "zerocopy-derive" name = "zerocopy-derive"
version = "0.7.32" version = "0.7.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.92",
"quote 1.0.35", "quote 1.0.38",
"syn 2.0.52", "syn 2.0.95",
] ]

View File

@@ -9,9 +9,9 @@ proc-macro = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
quote = "1.0.35" quote = "1.0.38"
syn = { version = "2.0.52" } syn = { version = "2.0.95" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
proc-macro2 = "1.0.78" proc-macro2 = "1.0.92"
darling = "0.20.8" darling = "0.20.10"
itertools = "0.13.0" itertools = "0.14.0"

View File

@@ -3,28 +3,28 @@ use quote::ToTokens;
use syn::spanned::Spanned; use syn::spanned::Spanned;
pub fn add_trait_bounds(mut generics: syn::Generics, bound: syn::TypeParamBound) -> syn::Generics { pub fn add_trait_bounds(mut generics: syn::Generics, bound: syn::TypeParamBound) -> syn::Generics {
for param in &mut generics.params { for param in &mut generics.params {
if let syn::GenericParam::Type(ref mut type_param) = *param { if let syn::GenericParam::Type(ref mut type_param) = *param {
type_param.bounds.push(bound.clone()) type_param.bounds.push(bound.clone())
} }
} }
generics generics
} }
pub fn destructure(fields: &syn::Fields) -> Option<pm2::TokenStream> { pub fn destructure(fields: &syn::Fields) -> Option<pm2::TokenStream> {
match fields { match fields {
syn::Fields::Unit => None, syn::Fields::Unit => None,
syn::Fields::Named(_) => { syn::Fields::Named(_) => {
let field_list = fields.iter().map(|f| f.ident.as_ref().unwrap()); let field_list = fields.iter().map(|f| f.ident.as_ref().unwrap());
Some(quote! { { #(#field_list),* } }) Some(quote! { { #(#field_list),* } })
}, },
syn::Fields::Unnamed(un) => { syn::Fields::Unnamed(un) => {
let field_list = (0..fields.len()).map(|i| pos_field_name(i, un.span())); let field_list = (0..fields.len()).map(|i| pos_field_name(i, un.span()));
Some(quote! { ( #(#field_list),* ) }) Some(quote! { ( #(#field_list),* ) })
}, },
} }
} }
pub fn pos_field_name(i: usize, span: pm2::Span) -> pm2::TokenStream { pub fn pos_field_name(i: usize, span: pm2::Span) -> pm2::TokenStream {
syn::Ident::new(&format!("field_{i}"), span).to_token_stream() syn::Ident::new(&format!("field_{i}"), span).to_token_stream()
} }

View File

@@ -4,53 +4,53 @@ use proc_macro2 as pm2;
use crate::common::add_trait_bounds; use crate::common::add_trait_bounds;
pub fn derive(input: TokenStream) -> TokenStream { pub fn derive(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree // Parse the input tokens into a syntax tree
let input = parse_macro_input!(input as syn::DeriveInput); let input = parse_macro_input!(input as syn::DeriveInput);
let generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Decode)); let generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Decode));
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let name = input.ident; let name = input.ident;
let decode = decode_body(&input.data); let decode = decode_body(&input.data);
let expanded = quote! { let expanded = quote! {
impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause { impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause {
fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self { #decode } fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self { #decode }
} }
}; };
TokenStream::from(expanded) TokenStream::from(expanded)
} }
fn decode_fields(fields: &syn::Fields) -> pm2::TokenStream { fn decode_fields(fields: &syn::Fields) -> pm2::TokenStream {
match fields { match fields {
syn::Fields::Unit => quote! {}, syn::Fields::Unit => quote! {},
syn::Fields::Named(_) => { syn::Fields::Named(_) => {
let names = fields.iter().map(|f| f.ident.as_ref().unwrap()); let names = fields.iter().map(|f| f.ident.as_ref().unwrap());
quote! { { #( #names: orchid_api_traits::Decode::decode(read), )* } } quote! { { #( #names: orchid_api_traits::Decode::decode(read), )* } }
}, },
syn::Fields::Unnamed(_) => { syn::Fields::Unnamed(_) => {
let exprs = fields.iter().map(|_| quote! { orchid_api_traits::Decode::decode(read), }); let exprs = fields.iter().map(|_| quote! { orchid_api_traits::Decode::decode(read), });
quote! { ( #( #exprs )* ) } quote! { ( #( #exprs )* ) }
}, },
} }
} }
fn decode_body(data: &syn::Data) -> proc_macro2::TokenStream { fn decode_body(data: &syn::Data) -> proc_macro2::TokenStream {
match data { match data {
syn::Data::Union(_) => panic!("Unions can't be deserialized"), syn::Data::Union(_) => panic!("Unions can't be deserialized"),
syn::Data::Struct(str) => { syn::Data::Struct(str) => {
let fields = decode_fields(&str.fields); let fields = decode_fields(&str.fields);
quote! { Self #fields } quote! { Self #fields }
}, },
syn::Data::Enum(en) => { syn::Data::Enum(en) => {
let opts = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| { let opts = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| {
let fields = decode_fields(&v.fields); let fields = decode_fields(&v.fields);
let id = i as u8; let id = i as u8;
quote! { #id => Self::#ident #fields, } quote! { #id => Self::#ident #fields, }
}); });
quote! { quote! {
match <u8 as orchid_api_traits::Decode>::decode(read) { match <u8 as orchid_api_traits::Decode>::decode(read) {
#(#opts)* #(#opts)*
x => panic!("Unrecognized enum kind {x}") x => panic!("Unrecognized enum kind {x}")
} }
} }
}, },
} }
} }

View File

@@ -6,61 +6,61 @@ use syn::spanned::Spanned;
use crate::common::{add_trait_bounds, destructure, pos_field_name}; use crate::common::{add_trait_bounds, destructure, pos_field_name};
pub fn derive(input: TokenStream) -> TokenStream { pub fn derive(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree // Parse the input tokens into a syntax tree
let input = parse_macro_input!(input as syn::DeriveInput); let input = parse_macro_input!(input as syn::DeriveInput);
let e_generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Encode)); let e_generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Encode));
let (e_impl_generics, e_ty_generics, e_where_clause) = e_generics.split_for_impl(); let (e_impl_generics, e_ty_generics, e_where_clause) = e_generics.split_for_impl();
let name = input.ident; let name = input.ident;
let encode = encode_body(&input.data); let encode = encode_body(&input.data);
let expanded = quote! { let expanded = quote! {
impl #e_impl_generics orchid_api_traits::Encode for #name #e_ty_generics #e_where_clause { impl #e_impl_generics orchid_api_traits::Encode for #name #e_ty_generics #e_where_clause {
fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) { #encode } fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) { #encode }
} }
}; };
TokenStream::from(expanded) TokenStream::from(expanded)
} }
fn encode_body(data: &syn::Data) -> Option<pm2::TokenStream> { fn encode_body(data: &syn::Data) -> Option<pm2::TokenStream> {
match data { match data {
syn::Data::Union(_) => panic!("Unions can't be deserialized"), syn::Data::Union(_) => panic!("Unions can't be deserialized"),
syn::Data::Struct(str) => { syn::Data::Struct(str) => {
let dest = destructure(&str.fields)?; let dest = destructure(&str.fields)?;
let body = encode_items(&str.fields); let body = encode_items(&str.fields);
Some(quote! { Some(quote! {
let Self #dest = &self; let Self #dest = &self;
#body #body
}) })
}, },
syn::Data::Enum(en) => { syn::Data::Enum(en) => {
let options = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| { let options = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| {
let dest = destructure(&v.fields).unwrap_or_default(); let dest = destructure(&v.fields).unwrap_or_default();
let body = encode_items(&v.fields); let body = encode_items(&v.fields);
quote! { quote! {
Self::#ident #dest => { Self::#ident #dest => {
(#i as u8).encode(write); (#i as u8).encode(write);
#body #body
} }
} }
}); });
Some(quote! { Some(quote! {
match self { match self {
#(#options)* #(#options)*
_ => unreachable!("Autogenerated encode impl for all possible variants"), _ => unreachable!("Autogenerated encode impl for all possible variants"),
} }
}) })
}, },
} }
} }
fn encode_names<T: ToTokens>(names: impl Iterator<Item = T>) -> pm2::TokenStream { fn encode_names<T: ToTokens>(names: impl Iterator<Item = T>) -> pm2::TokenStream {
quote! { #( #names .encode(write); )* } quote! { #( #names .encode(write); )* }
} }
fn encode_items(fields: &syn::Fields) -> Option<pm2::TokenStream> { fn encode_items(fields: &syn::Fields) -> Option<pm2::TokenStream> {
match fields { match fields {
syn::Fields::Unit => None, syn::Fields::Unit => None,
syn::Fields::Named(_) => Some(encode_names(fields.iter().map(|f| f.ident.as_ref().unwrap()))), syn::Fields::Named(_) => Some(encode_names(fields.iter().map(|f| f.ident.as_ref().unwrap()))),
syn::Fields::Unnamed(un) => syn::Fields::Unnamed(un) =>
Some(encode_names((0..fields.len()).map(|i| pos_field_name(i, un.span())))), Some(encode_names((0..fields.len()).map(|i| pos_field_name(i, un.span())))),
} }
} }

View File

@@ -7,118 +7,118 @@ use proc_macro2 as pm2;
use syn::DeriveInput; use syn::DeriveInput;
pub fn derive(input: TokenStream) -> TokenStream { pub fn derive(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree // Parse the input tokens into a syntax tree
let input = parse_macro_input!(input as syn::DeriveInput); let input = parse_macro_input!(input as syn::DeriveInput);
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
let name = &input.ident; let name = &input.ident;
let extendable = is_extendable(&input); let extendable = is_extendable(&input);
let is_leaf_val = if extendable { quote!(TLFalse) } else { quote!(TLTrue) }; let is_leaf_val = if extendable { quote!(TLFalse) } else { quote!(TLTrue) };
match get_ancestry(&input) { match get_ancestry(&input) {
None => TokenStream::from(quote! { None => TokenStream::from(quote! {
impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause { impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause {
type IsRoot = orchid_api_traits::TLTrue; type IsRoot = orchid_api_traits::TLTrue;
type IsLeaf = orchid_api_traits:: #is_leaf_val ; type IsLeaf = orchid_api_traits:: #is_leaf_val ;
} }
}), }),
Some(ancestry) => { Some(ancestry) => {
let parent = ancestry[0].clone(); let parent = ancestry[0].clone();
let casts = gen_casts(&ancestry[..], &quote!(#name)); let casts = gen_casts(&ancestry[..], &quote!(#name));
TokenStream::from(quote! { TokenStream::from(quote! {
#casts #casts
impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause { impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause {
type IsRoot = orchid_api_traits::TLFalse; type IsRoot = orchid_api_traits::TLFalse;
type IsLeaf = orchid_api_traits:: #is_leaf_val ; type IsLeaf = orchid_api_traits:: #is_leaf_val ;
} }
impl #impl_generics orchid_api_traits::Extends for #name #ty_generics #where_clause { impl #impl_generics orchid_api_traits::Extends for #name #ty_generics #where_clause {
type Parent = #parent; type Parent = #parent;
} }
}) })
}, },
} }
} }
fn gen_casts(ancestry: &[pm2::TokenStream], this: &pm2::TokenStream) -> pm2::TokenStream { fn gen_casts(ancestry: &[pm2::TokenStream], this: &pm2::TokenStream) -> pm2::TokenStream {
let from_impls = iter::once(this).chain(ancestry.iter()).tuple_windows().map(|(prev, cur)| { let from_impls = iter::once(this).chain(ancestry.iter()).tuple_windows().map(|(prev, cur)| {
quote! { quote! {
impl From<#this> for #cur { impl From<#this> for #cur {
fn from(value: #this) -> Self { fn from(value: #this) -> Self {
#cur::#prev(value.into()) #cur::#prev(value.into())
} }
} }
} }
}); });
let try_from_impls = (1..=ancestry.len()).map(|len| { let try_from_impls = (1..=ancestry.len()).map(|len| {
let (orig, inter) = ancestry[..len].split_last().unwrap(); let (orig, inter) = ancestry[..len].split_last().unwrap();
fn gen_chk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream { fn gen_chk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream {
match r.split_last() { match r.split_last() {
None => quote! { #last (_) => true }, None => quote! { #last (_) => true },
Some((ty, tail)) => { Some((ty, tail)) => {
let sub = gen_chk(tail, last); let sub = gen_chk(tail, last);
quote! { quote! {
#ty ( value ) => match value { #ty ( value ) => match value {
#ty:: #sub , #ty:: #sub ,
_ => false _ => false
} }
} }
}, },
} }
} }
let chk = gen_chk(inter, this); let chk = gen_chk(inter, this);
fn gen_unpk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream { fn gen_unpk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream {
match r.split_last() { match r.split_last() {
None => quote! { #last ( value ) => value }, None => quote! { #last ( value ) => value },
Some((ty, tail)) => { Some((ty, tail)) => {
let sub = gen_unpk(tail, last); let sub = gen_unpk(tail, last);
quote! { quote! {
#ty ( value ) => match value { #ty ( value ) => match value {
#ty:: #sub , #ty:: #sub ,
_ => unreachable!("Checked above!"), _ => unreachable!("Checked above!"),
} }
} }
}, },
} }
} }
let unpk = gen_unpk(inter, this); let unpk = gen_unpk(inter, this);
quote! { quote! {
impl TryFrom<#orig> for #this { impl TryFrom<#orig> for #this {
type Error = #orig; type Error = #orig;
fn try_from(value: #orig) -> Result<Self, Self::Error> { fn try_from(value: #orig) -> Result<Self, Self::Error> {
let can_cast = match &value { let can_cast = match &value {
#orig:: #chk , #orig:: #chk ,
_ => false _ => false
}; };
if !can_cast { return Err(value) } if !can_cast { return Err(value) }
Ok ( match value { Ok ( match value {
#orig:: #unpk , #orig:: #unpk ,
_ => unreachable!("Checked above!") _ => unreachable!("Checked above!")
} ) } )
} }
} }
} }
}); });
from_impls.chain(try_from_impls).flatten().collect() from_impls.chain(try_from_impls).flatten().collect()
} }
fn get_ancestry(input: &DeriveInput) -> Option<Vec<pm2::TokenStream>> { fn get_ancestry(input: &DeriveInput) -> Option<Vec<pm2::TokenStream>> {
input.attrs.iter().find(|a| a.path().get_ident().is_some_and(|i| *i == "extends")).map(|attr| { input.attrs.iter().find(|a| a.path().get_ident().is_some_and(|i| *i == "extends")).map(|attr| {
match &attr.meta { match &attr.meta {
syn::Meta::List(list) => (list.tokens.clone().into_iter()) syn::Meta::List(list) => (list.tokens.clone().into_iter())
.batching(|it| { .batching(|it| {
let grp: pm2::TokenStream = let grp: pm2::TokenStream =
it.take_while(|t| { it.take_while(|t| {
if let TokenTree::Punct(punct) = t { punct.as_char() != ',' } else { true } if let TokenTree::Punct(punct) = t { punct.as_char() != ',' } else { true }
}) })
.collect(); .collect();
(!grp.is_empty()).then_some(grp) (!grp.is_empty()).then_some(grp)
}) })
.collect(), .collect(),
_ => panic!("The correct format of the parent macro is #[parent(SomeParentType)]"), _ => panic!("The correct format of the parent macro is #[parent(SomeParentType)]"),
} }
}) })
} }
fn is_extendable(input: &DeriveInput) -> bool { fn is_extendable(input: &DeriveInput) -> bool {
input.attrs.iter().any(|a| a.path().get_ident().is_some_and(|i| *i == "extendable")) input.attrs.iter().any(|a| a.path().get_ident().is_some_and(|i| *i == "extendable"))
} }
#[test] #[test]

View File

@@ -23,5 +23,5 @@ pub fn hierarchy(input: TokenStream) -> TokenStream { hierarchy::derive(input) }
#[proc_macro_derive(Coding)] #[proc_macro_derive(Coding)]
pub fn coding(input: TokenStream) -> TokenStream { pub fn coding(input: TokenStream) -> TokenStream {
decode(input.clone()).into_iter().chain(encode(input)).collect() decode(input.clone()).into_iter().chain(encode(input)).collect()
} }

View File

@@ -6,6 +6,6 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
itertools = "0.13.0" itertools = "0.14.0"
never = "0.1.0" never = "0.1.0"
ordered-float = "4.2" ordered-float = "4.6.0"

View File

@@ -13,36 +13,36 @@ use ordered_float::NotNan;
use crate::encode_enum; use crate::encode_enum;
pub trait Decode { pub trait Decode {
/// Decode an instance from the beginning of the buffer. Return the decoded /// Decode an instance from the beginning of the buffer. Return the decoded
/// data and the remaining buffer. /// data and the remaining buffer.
fn decode<R: Read + ?Sized>(read: &mut R) -> Self; fn decode<R: Read + ?Sized>(read: &mut R) -> Self;
} }
pub trait Encode { pub trait Encode {
/// Append an instance of the struct to the buffer /// Append an instance of the struct to the buffer
fn encode<W: Write + ?Sized>(&self, write: &mut W); fn encode<W: Write + ?Sized>(&self, write: &mut W);
} }
pub trait Coding: Encode + Decode + Clone { pub trait Coding: Encode + Decode + Clone {
fn get_decoder<T>(map: impl Fn(Self) -> T + 'static) -> impl Fn(&mut dyn Read) -> T { fn get_decoder<T>(map: impl Fn(Self) -> T + 'static) -> impl Fn(&mut dyn Read) -> T {
move |r| map(Self::decode(r)) move |r| map(Self::decode(r))
} }
} }
impl<T: Encode + Decode + Clone> Coding for T {} impl<T: Encode + Decode + Clone> Coding for T {}
macro_rules! num_impl { macro_rules! num_impl {
($number:ty) => { ($number:ty) => {
impl Decode for $number { impl Decode for $number {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let mut bytes = [0u8; (<$number>::BITS / 8) as usize]; let mut bytes = [0u8; (<$number>::BITS / 8) as usize];
read.read_exact(&mut bytes).unwrap(); read.read_exact(&mut bytes).unwrap();
<$number>::from_be_bytes(bytes) <$number>::from_be_bytes(bytes)
} }
} }
impl Encode for $number { impl Encode for $number {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write.write_all(&self.to_be_bytes()).expect("Could not write number") write.write_all(&self.to_be_bytes()).expect("Could not write number")
} }
} }
}; };
} }
num_impl!(u128); num_impl!(u128);
num_impl!(u64); num_impl!(u64);
@@ -56,14 +56,14 @@ num_impl!(i16);
num_impl!(i8); num_impl!(i8);
macro_rules! nonzero_impl { macro_rules! nonzero_impl {
($name:ty) => { ($name:ty) => {
impl Decode for $name { impl Decode for $name {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { Self::new(Decode::decode(read)).unwrap() } fn decode<R: Read + ?Sized>(read: &mut R) -> Self { Self::new(Decode::decode(read)).unwrap() }
} }
impl Encode for $name { impl Encode for $name {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { self.get().encode(write) } fn encode<W: Write + ?Sized>(&self, write: &mut W) { self.get().encode(write) }
} }
}; };
} }
nonzero_impl!(std::num::NonZeroU8); nonzero_impl!(std::num::NonZeroU8);
@@ -78,111 +78,111 @@ nonzero_impl!(std::num::NonZeroI64);
nonzero_impl!(std::num::NonZeroI128); nonzero_impl!(std::num::NonZeroI128);
impl<T: Encode + ?Sized> Encode for &T { impl<T: Encode + ?Sized> Encode for &T {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) } fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) }
} }
macro_rules! float_impl { macro_rules! float_impl {
($t:ty, $size:expr) => { ($t:ty, $size:expr) => {
impl Decode for NotNan<$t> { impl Decode for NotNan<$t> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let mut bytes = [0u8; $size]; let mut bytes = [0u8; $size];
read.read_exact(&mut bytes).unwrap(); read.read_exact(&mut bytes).unwrap();
NotNan::new(<$t>::from_be_bytes(bytes)).expect("Float was NaN") NotNan::new(<$t>::from_be_bytes(bytes)).expect("Float was NaN")
} }
} }
impl Encode for NotNan<$t> { impl Encode for NotNan<$t> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write.write_all(&self.as_ref().to_be_bytes()).expect("Could not write number") write.write_all(&self.as_ref().to_be_bytes()).expect("Could not write number")
} }
} }
}; };
} }
float_impl!(f64, 8); float_impl!(f64, 8);
float_impl!(f32, 4); float_impl!(f32, 4);
impl Decode for String { impl Decode for String {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let len = u64::decode(read).try_into().unwrap(); let len = u64::decode(read).try_into().unwrap();
let mut data = vec![0u8; len]; let mut data = vec![0u8; len];
read.read_exact(&mut data).unwrap(); read.read_exact(&mut data).unwrap();
std::str::from_utf8(&data).expect("String invalid UTF-8").to_owned() std::str::from_utf8(&data).expect("String invalid UTF-8").to_owned()
} }
} }
impl Encode for String { impl Encode for String {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write); u64::try_from(self.len()).unwrap().encode(write);
write.write_all(self.as_bytes()).unwrap() write.write_all(self.as_bytes()).unwrap()
} }
} }
impl Encode for str { impl Encode for str {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write); u64::try_from(self.len()).unwrap().encode(write);
write.write_all(self.as_bytes()).unwrap() write.write_all(self.as_bytes()).unwrap()
} }
} }
impl<T: Decode> Decode for Vec<T> { impl<T: Decode> Decode for Vec<T> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let len = u64::decode(read).try_into().unwrap(); let len = u64::decode(read).try_into().unwrap();
iter::repeat_with(|| T::decode(read)).take(len).collect() iter::repeat_with(|| T::decode(read)).take(len).collect()
} }
} }
impl<T: Encode> Encode for Vec<T> { impl<T: Encode> Encode for Vec<T> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write); u64::try_from(self.len()).unwrap().encode(write);
self.iter().for_each(|t| t.encode(write)); self.iter().for_each(|t| t.encode(write));
} }
} }
impl<T: Encode> Encode for [T] { impl<T: Encode> Encode for [T] {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write); u64::try_from(self.len()).unwrap().encode(write);
self.iter().for_each(|t| t.encode(write)); self.iter().for_each(|t| t.encode(write));
} }
} }
impl<T: Decode> Decode for Option<T> { impl<T: Decode> Decode for Option<T> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
match u8::decode(read) { match u8::decode(read) {
0 => None, 0 => None,
1 => Some(T::decode(read)), 1 => Some(T::decode(read)),
x => panic!("{x} is not a valid option value"), x => panic!("{x} is not a valid option value"),
} }
} }
} }
impl<T: Encode> Encode for Option<T> { impl<T: Encode> Encode for Option<T> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
let t = if let Some(t) = self { t } else { return 0u8.encode(write) }; let t = if let Some(t) = self { t } else { return 0u8.encode(write) };
1u8.encode(write); 1u8.encode(write);
t.encode(write); t.encode(write);
} }
} }
impl<T: Decode, E: Decode> Decode for Result<T, E> { impl<T: Decode, E: Decode> Decode for Result<T, E> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
match u8::decode(read) { match u8::decode(read) {
0 => Self::Ok(T::decode(read)), 0 => Self::Ok(T::decode(read)),
1 => Self::Err(E::decode(read)), 1 => Self::Err(E::decode(read)),
x => panic!("Invalid Result tag {x}"), x => panic!("Invalid Result tag {x}"),
} }
} }
} }
impl<T: Encode, E: Encode> Encode for Result<T, E> { impl<T: Encode, E: Encode> Encode for Result<T, E> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
match self { match self {
Ok(t) => encode_enum(write, 0, |w| t.encode(w)), Ok(t) => encode_enum(write, 0, |w| t.encode(w)),
Err(e) => encode_enum(write, 1, |w| e.encode(w)), Err(e) => encode_enum(write, 1, |w| e.encode(w)),
} }
} }
} }
impl<K: Decode + Eq + Hash, V: Decode> Decode for HashMap<K, V> { impl<K: Decode + Eq + Hash, V: Decode> Decode for HashMap<K, V> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let len = u64::decode(read).try_into().unwrap(); let len = u64::decode(read).try_into().unwrap();
iter::repeat_with(|| <(K, V)>::decode(read)).take(len).collect() iter::repeat_with(|| <(K, V)>::decode(read)).take(len).collect()
} }
} }
impl<K: Encode + Eq + Hash, V: Encode> Encode for HashMap<K, V> { impl<K: Encode + Eq + Hash, V: Encode> Encode for HashMap<K, V> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write); u64::try_from(self.len()).unwrap().encode(write);
self.iter().for_each(|pair| pair.encode(write)); self.iter().for_each(|pair| pair.encode(write));
} }
} }
macro_rules! tuple { macro_rules! tuple {
(($($t:ident)*) ($($T:ident)*)) => { (($($t:ident)*) ($($T:ident)*)) => {
@@ -216,40 +216,40 @@ tuple!((t u v x y z a b c d e f g h i) (T U V X Y Z A B C D E F G H I));
tuple!((t u v x y z a b c d e f g h i j) (T U V X Y Z A B C D E F G H I J)); // 16 tuple!((t u v x y z a b c d e f g h i j) (T U V X Y Z A B C D E F G H I J)); // 16
impl Decode for () { impl Decode for () {
fn decode<R: Read + ?Sized>(_: &mut R) -> Self {} fn decode<R: Read + ?Sized>(_: &mut R) -> Self {}
} }
impl Encode for () { impl Encode for () {
fn encode<W: Write + ?Sized>(&self, _: &mut W) {} fn encode<W: Write + ?Sized>(&self, _: &mut W) {}
} }
impl Decode for Never { impl Decode for Never {
fn decode<R: Read + ?Sized>(_: &mut R) -> Self { fn decode<R: Read + ?Sized>(_: &mut R) -> Self {
unreachable!("A value of Never cannot exist so it can't have been serialized"); unreachable!("A value of Never cannot exist so it can't have been serialized");
} }
} }
impl Encode for Never { impl Encode for Never {
fn encode<W: Write + ?Sized>(&self, _: &mut W) { match *self {} } fn encode<W: Write + ?Sized>(&self, _: &mut W) { match *self {} }
} }
impl Decode for bool { impl Decode for bool {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let mut buf = [0]; let mut buf = [0];
read.read_exact(&mut buf).unwrap(); read.read_exact(&mut buf).unwrap();
buf[0] != 0 buf[0] != 0
} }
} }
impl Encode for bool { impl Encode for bool {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write.write_all(&[if *self { 0xff } else { 0 }]).unwrap() write.write_all(&[if *self { 0xff } else { 0 }]).unwrap()
} }
} }
impl<T: Decode, const N: usize> Decode for [T; N] { impl<T: Decode, const N: usize> Decode for [T; N] {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
// TODO: figure out how to do this in safe rust on the stack // TODO: figure out how to do this in safe rust on the stack
((0..N).map(|_| T::decode(read)).collect::<Vec<_>>().try_into()) ((0..N).map(|_| T::decode(read)).collect::<Vec<_>>().try_into())
.unwrap_or_else(|_| unreachable!("The length of this iterator is statically known")) .unwrap_or_else(|_| unreachable!("The length of this iterator is statically known"))
} }
} }
impl<T: Encode, const N: usize> Encode for [T; N] { impl<T: Encode, const N: usize> Encode for [T; N] {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { self.iter().for_each(|t| t.encode(write)) } fn encode<W: Write + ?Sized>(&self, write: &mut W) { self.iter().for_each(|t| t.encode(write)) }
} }
macro_rules! two_end_range { macro_rules! two_end_range {
@@ -271,14 +271,14 @@ two_end_range!(x, Range, .., x.start, x.end);
two_end_range!(x, RangeInclusive, ..=, x.start(), x.end()); two_end_range!(x, RangeInclusive, ..=, x.start(), x.end());
macro_rules! smart_ptr { macro_rules! smart_ptr {
($name:tt) => { ($name:tt) => {
impl<T: Decode> Decode for $name<T> { impl<T: Decode> Decode for $name<T> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { $name::new(T::decode(read)) } fn decode<R: Read + ?Sized>(read: &mut R) -> Self { $name::new(T::decode(read)) }
} }
impl<T: Encode> Encode for $name<T> { impl<T: Encode> Encode for $name<T> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) } fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) }
} }
}; };
} }
smart_ptr!(Arc); smart_ptr!(Arc);
@@ -288,15 +288,15 @@ smart_ptr!(Box);
impl<T: ?Sized + ToOwned> Decode for Cow<'_, T> impl<T: ?Sized + ToOwned> Decode for Cow<'_, T>
where T::Owned: Decode where T::Owned: Decode
{ {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { Cow::Owned(T::Owned::decode(read)) } fn decode<R: Read + ?Sized>(read: &mut R) -> Self { Cow::Owned(T::Owned::decode(read)) }
} }
impl<T: ?Sized + Encode + ToOwned> Encode for Cow<'_, T> { impl<T: ?Sized + Encode + ToOwned> Encode for Cow<'_, T> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) } fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) }
} }
impl Decode for char { impl Decode for char {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { char::from_u32(u32::decode(read)).unwrap() } fn decode<R: Read + ?Sized>(read: &mut R) -> Self { char::from_u32(u32::decode(read)).unwrap() }
} }
impl Encode for char { impl Encode for char {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { (*self as u32).encode(write) } fn encode<W: Write + ?Sized>(&self, write: &mut W) { (*self as u32).encode(write) }
} }

View File

@@ -5,32 +5,32 @@ use itertools::{Chunk, Itertools};
use crate::Encode; use crate::Encode;
pub fn encode_enum<W: Write + ?Sized>(write: &mut W, id: u8, f: impl FnOnce(&mut W)) { pub fn encode_enum<W: Write + ?Sized>(write: &mut W, id: u8, f: impl FnOnce(&mut W)) {
id.encode(write); id.encode(write);
f(write) f(write)
} }
pub fn write_exact<W: Write + ?Sized>(write: &mut W, bytes: &'static [u8]) { pub fn write_exact<W: Write + ?Sized>(write: &mut W, bytes: &'static [u8]) {
write.write_all(bytes).expect("Failed to write exact bytes") write.write_all(bytes).expect("Failed to write exact bytes")
} }
pub fn print_bytes(b: &[u8]) -> String { pub fn print_bytes(b: &[u8]) -> String {
(b.iter().map(|b| format!("{b:02x}"))) (b.iter().map(|b| format!("{b:02x}")))
.chunks(4) .chunks(4)
.into_iter() .into_iter()
.map(|mut c: Chunk<_>| c.join(" ")) .map(|mut c: Chunk<_>| c.join(" "))
.join(" ") .join(" ")
} }
pub fn read_exact<R: Read + ?Sized>(read: &mut R, bytes: &'static [u8]) { pub fn read_exact<R: Read + ?Sized>(read: &mut R, bytes: &'static [u8]) {
let mut data = vec![0u8; bytes.len()]; let mut data = vec![0u8; bytes.len()];
read.read_exact(&mut data).expect("Failed to read bytes"); read.read_exact(&mut data).expect("Failed to read bytes");
if data != bytes { if data != bytes {
panic!("Wrong bytes!\nExpected: {}\nFound: {}", print_bytes(bytes), print_bytes(&data)); panic!("Wrong bytes!\nExpected: {}\nFound: {}", print_bytes(bytes), print_bytes(&data));
} }
} }
pub fn enc_vec(enc: &impl Encode) -> Vec<u8> { pub fn enc_vec(enc: &impl Encode) -> Vec<u8> {
let mut vec = Vec::new(); let mut vec = Vec::new();
enc.encode(&mut vec); enc.encode(&mut vec);
vec vec
} }

View File

@@ -11,54 +11,54 @@ impl TLBool for TLFalse {}
/// A type that implements [Hierarchy]. Used to select implementations of traits /// A type that implements [Hierarchy]. Used to select implementations of traits
/// on the hierarchy /// on the hierarchy
pub trait InHierarchy: Clone { pub trait InHierarchy: Clone {
/// Indicates that this hierarchy element is a leaf. Leaves can never have /// Indicates that this hierarchy element is a leaf. Leaves can never have
/// children /// children
type IsLeaf: TLBool; type IsLeaf: TLBool;
/// Indicates that this hierarchy element is a root. Roots can never have /// Indicates that this hierarchy element is a root. Roots can never have
/// parents /// parents
type IsRoot: TLBool; type IsRoot: TLBool;
} }
/// A type that derives from a parent type. /// A type that derives from a parent type.
pub trait Extends: InHierarchy<IsRoot = TLFalse> + Into<Self::Parent> { pub trait Extends: InHierarchy<IsRoot = TLFalse> + Into<Self::Parent> {
/// Specify the immediate parent of this type. This guides the /// Specify the immediate parent of this type. This guides the
type Parent: InHierarchy<IsLeaf = TLFalse> type Parent: InHierarchy<IsLeaf = TLFalse>
+ TryInto<Self> + TryInto<Self>
+ UnderRootImpl<<Self::Parent as InHierarchy>::IsRoot>; + UnderRootImpl<<Self::Parent as InHierarchy>::IsRoot>;
} }
pub trait UnderRootImpl<IsRoot: TLBool>: Sized { pub trait UnderRootImpl<IsRoot: TLBool>: Sized {
type __Root: UnderRoot<IsRoot = TLTrue, Root = Self::__Root>; type __Root: UnderRoot<IsRoot = TLTrue, Root = Self::__Root>;
fn __into_root(self) -> Self::__Root; fn __into_root(self) -> Self::__Root;
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root>; fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root>;
} }
pub trait UnderRoot: InHierarchy { pub trait UnderRoot: InHierarchy {
type Root: UnderRoot<IsRoot = TLTrue, Root = Self::Root>; type Root: UnderRoot<IsRoot = TLTrue, Root = Self::Root>;
fn into_root(self) -> Self::Root; fn into_root(self) -> Self::Root;
fn try_from_root(root: Self::Root) -> Result<Self, Self::Root>; fn try_from_root(root: Self::Root) -> Result<Self, Self::Root>;
} }
impl<T: InHierarchy + UnderRootImpl<T::IsRoot>> UnderRoot for T { impl<T: InHierarchy + UnderRootImpl<T::IsRoot>> UnderRoot for T {
type Root = <Self as UnderRootImpl<<Self as InHierarchy>::IsRoot>>::__Root; type Root = <Self as UnderRootImpl<<Self as InHierarchy>::IsRoot>>::__Root;
fn into_root(self) -> Self::Root { self.__into_root() } fn into_root(self) -> Self::Root { self.__into_root() }
fn try_from_root(root: Self::Root) -> Result<Self, Self::Root> { Self::__try_from_root(root) } fn try_from_root(root: Self::Root) -> Result<Self, Self::Root> { Self::__try_from_root(root) }
} }
impl<T: InHierarchy<IsRoot = TLTrue>> UnderRootImpl<TLTrue> for T { impl<T: InHierarchy<IsRoot = TLTrue>> UnderRootImpl<TLTrue> for T {
type __Root = Self; type __Root = Self;
fn __into_root(self) -> Self::__Root { self } fn __into_root(self) -> Self::__Root { self }
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> { Ok(root) } fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> { Ok(root) }
} }
impl<T: InHierarchy<IsRoot = TLFalse> + Extends> UnderRootImpl<TLFalse> for T { impl<T: InHierarchy<IsRoot = TLFalse> + Extends> UnderRootImpl<TLFalse> for T {
type __Root = <<Self as Extends>::Parent as UnderRootImpl< type __Root = <<Self as Extends>::Parent as UnderRootImpl<
<<Self as Extends>::Parent as InHierarchy>::IsRoot, <<Self as Extends>::Parent as InHierarchy>::IsRoot,
>>::__Root; >>::__Root;
fn __into_root(self) -> Self::__Root { fn __into_root(self) -> Self::__Root {
<Self as Into<<Self as Extends>::Parent>>::into(self).into_root() <Self as Into<<Self as Extends>::Parent>>::into(self).into_root()
} }
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> { fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> {
let parent = <Self as Extends>::Parent::try_from_root(root)?; let parent = <Self as Extends>::Parent::try_from_root(root)?;
parent.clone().try_into().map_err(|_| parent.into_root()) parent.clone().try_into().map_err(|_| parent.into_root())
} }
} }

View File

@@ -2,20 +2,20 @@ use super::coding::Coding;
use crate::helpers::enc_vec; use crate::helpers::enc_vec;
pub trait Request: Coding + Sized + Send + 'static { pub trait Request: Coding + Sized + Send + 'static {
type Response: Coding + Send + 'static; type Response: Coding + Send + 'static;
} }
pub fn respond<R: Request>(_: &R, rep: R::Response) -> Vec<u8> { enc_vec(&rep) } pub fn respond<R: Request>(_: &R, rep: R::Response) -> Vec<u8> { enc_vec(&rep) }
pub fn respond_with<R: Request>(r: &R, f: impl FnOnce(&R) -> R::Response) -> Vec<u8> { pub fn respond_with<R: Request>(r: &R, f: impl FnOnce(&R) -> R::Response) -> Vec<u8> {
respond(r, f(r)) respond(r, f(r))
} }
pub trait Channel: 'static { pub trait Channel: 'static {
type Req: Coding + Sized + Send + 'static; type Req: Coding + Sized + Send + 'static;
type Notif: Coding + Sized + Send + 'static; type Notif: Coding + Sized + Send + 'static;
} }
pub trait MsgSet: Send + Sync + 'static { pub trait MsgSet: Send + Sync + 'static {
type In: Channel; type In: Channel;
type Out: Channel; type Out: Channel;
} }

View File

@@ -6,6 +6,6 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
ordered-float = "4.2.0" ordered-float = "4.6.0"
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }

View File

@@ -3,7 +3,9 @@ use std::num::NonZeroU64;
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request; use orchid_api_traits::Request;
use crate::{ExprTicket, Expression, ExtHostReq, HostExtNotif, HostExtReq, OrcResult, SysId, TStrv}; use crate::{
ExprTicket, Expression, ExtHostReq, HostExtNotif, HostExtReq, OrcResult, SysId, TStrv,
};
pub type AtomData = Vec<u8>; pub type AtomData = Vec<u8>;
@@ -15,34 +17,34 @@ pub struct AtomId(pub NonZeroU64);
/// This has the same semantics as [Atom] except in that the owner is implied. /// This has the same semantics as [Atom] except in that the owner is implied.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)]
pub struct LocalAtom { pub struct LocalAtom {
pub drop: Option<AtomId>, pub drop: Option<AtomId>,
pub data: AtomData, pub data: AtomData,
} }
impl LocalAtom { impl LocalAtom {
pub fn associate(self, owner: SysId) -> Atom { Atom { owner, drop: self.drop, data: self.data } } pub fn associate(self, owner: SysId) -> Atom { Atom { owner, drop: self.drop, data: self.data } }
} }
/// An atom representation that can be serialized and sent around. Atoms /// An atom representation that can be serialized and sent around. Atoms
/// represent the smallest increment of work. /// represent the smallest increment of work.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)]
pub struct Atom { pub struct Atom {
/// Instance ID of the system that created the atom /// Instance ID of the system that created the atom
pub owner: SysId, pub owner: SysId,
/// Indicates how the owner should be notified when this atom is dropped. /// Indicates how the owner should be notified when this atom is dropped.
/// Construction is always explicit and atoms are never cloned. /// Construction is always explicit and atoms are never cloned.
/// ///
/// Atoms with `drop == None` are also known as trivial, they can be /// Atoms with `drop == None` are also known as trivial, they can be
/// duplicated and stored with no regard to expression lifetimes. NOTICE /// duplicated and stored with no regard to expression lifetimes. NOTICE
/// that this only applies to the atom. If it's referenced with an /// that this only applies to the atom. If it's referenced with an
/// [ExprTicket], the ticket itself can still expire. /// [ExprTicket], the ticket itself can still expire.
/// ///
/// Notice also that the atoms still expire when the system is dropped, and /// Notice also that the atoms still expire when the system is dropped, and
/// are not portable across instances of the same system, so this doesn't /// are not portable across instances of the same system, so this doesn't
/// imply that the atom is serializable. /// imply that the atom is serializable.
pub drop: Option<AtomId>, pub drop: Option<AtomId>,
/// Data stored in the atom. This could be a key into a map, or the raw data /// Data stored in the atom. This could be a key into a map, or the raw data
/// of the atom if it isn't too big. /// of the atom if it isn't too big.
pub data: AtomData, pub data: AtomData,
} }
/// Attempt to apply an atom as a function to an expression /// Attempt to apply an atom as a function to an expression
@@ -50,7 +52,7 @@ pub struct Atom {
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct CallRef(pub Atom, pub ExprTicket); pub struct CallRef(pub Atom, pub ExprTicket);
impl Request for CallRef { impl Request for CallRef {
type Response = Expression; type Response = Expression;
} }
/// Attempt to apply an atom as a function, consuming the atom and enabling the /// Attempt to apply an atom as a function, consuming the atom and enabling the
@@ -60,21 +62,21 @@ impl Request for CallRef {
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct FinalCall(pub Atom, pub ExprTicket); pub struct FinalCall(pub Atom, pub ExprTicket);
impl Request for FinalCall { impl Request for FinalCall {
type Response = Expression; type Response = Expression;
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct SerializeAtom(pub Atom); pub struct SerializeAtom(pub Atom);
impl Request for SerializeAtom { impl Request for SerializeAtom {
type Response = Option<(Vec<u8>, Vec<ExprTicket>)>; type Response = Option<(Vec<u8>, Vec<ExprTicket>)>;
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
pub struct DeserAtom(pub SysId, pub Vec<u8>, pub Vec<ExprTicket>); pub struct DeserAtom(pub SysId, pub Vec<u8>, pub Vec<ExprTicket>);
impl Request for DeserAtom { impl Request for DeserAtom {
type Response = Atom; type Response = Atom;
} }
/// A request blindly routed to the system that provides an atom. /// A request blindly routed to the system that provides an atom.
@@ -82,26 +84,26 @@ impl Request for DeserAtom {
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct Fwded(pub Atom, pub TStrv, pub Vec<u8>); pub struct Fwded(pub Atom, pub TStrv, pub Vec<u8>);
impl Request for Fwded { impl Request for Fwded {
type Response = Option<Vec<u8>>; type Response = Option<Vec<u8>>;
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExtHostReq)] #[extends(ExtHostReq)]
pub struct Fwd(pub Atom, pub TStrv, pub Vec<u8>); pub struct Fwd(pub Atom, pub TStrv, pub Vec<u8>);
impl Request for Fwd { impl Request for Fwd {
type Response = Option<Vec<u8>>; type Response = Option<Vec<u8>>;
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum NextStep { pub enum NextStep {
Continue(Expression), Continue(Expression),
Halt, Halt,
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct Command(pub Atom); pub struct Command(pub Atom);
impl Request for Command { impl Request for Command {
type Response = OrcResult<NextStep>; type Response = OrcResult<NextStep>;
} }
/// Notification that an atom is being dropped because its associated expression /// Notification that an atom is being dropped because its associated expression
@@ -115,7 +117,7 @@ pub struct AtomDrop(pub SysId, pub AtomId);
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct AtomPrint(pub Atom); pub struct AtomPrint(pub Atom);
impl Request for AtomPrint { impl Request for AtomPrint {
type Response = String; type Response = String;
} }
/// Requests that apply to an existing atom instance /// Requests that apply to an existing atom instance
@@ -123,24 +125,24 @@ impl Request for AtomPrint {
#[extends(HostExtReq)] #[extends(HostExtReq)]
#[extendable] #[extendable]
pub enum AtomReq { pub enum AtomReq {
CallRef(CallRef), CallRef(CallRef),
FinalCall(FinalCall), FinalCall(FinalCall),
Fwded(Fwded), Fwded(Fwded),
Command(Command), Command(Command),
AtomPrint(AtomPrint), AtomPrint(AtomPrint),
SerializeAtom(SerializeAtom), SerializeAtom(SerializeAtom),
} }
impl AtomReq { impl AtomReq {
/// Obtain the first [Atom] argument of the request. All requests in this /// Obtain the first [Atom] argument of the request. All requests in this
/// subclass have at least one atom argument. /// subclass have at least one atom argument.
pub fn get_atom(&self) -> &Atom { pub fn get_atom(&self) -> &Atom {
match self { match self {
Self::CallRef(CallRef(a, ..)) Self::CallRef(CallRef(a, ..))
| Self::Command(Command(a)) | Self::Command(Command(a))
| Self::FinalCall(FinalCall(a, ..)) | Self::FinalCall(FinalCall(a, ..))
| Self::Fwded(Fwded(a, ..)) | Self::Fwded(Fwded(a, ..))
| Self::AtomPrint(AtomPrint(a)) | Self::AtomPrint(AtomPrint(a))
| Self::SerializeAtom(SerializeAtom(a)) => a, | Self::SerializeAtom(SerializeAtom(a)) => a,
} }
} }
} }

View File

@@ -10,11 +10,11 @@ pub struct ErrId(pub NonZeroU16);
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct ErrLocation { pub struct ErrLocation {
/// Description of the relation of this location to the error. If not used, /// Description of the relation of this location to the error. If not used,
/// set to empty string /// set to empty string
pub message: Arc<String>, pub message: Arc<String>,
/// Location in code where the error emerged. This is usually [Location::Gen]. /// Location in code where the error emerged. This is usually [Location::Gen].
pub location: Location, pub location: Location,
} }
/// Programming errors raised by extensions. At runtime these produce the /// Programming errors raised by extensions. At runtime these produce the
@@ -24,14 +24,14 @@ pub struct ErrLocation {
/// and a bottom if the file name isn't a string. /// and a bottom if the file name isn't a string.
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct OrcError { pub struct OrcError {
/// General description of the kind of error. /// General description of the kind of error.
pub description: TStr, pub description: TStr,
/// Specific information about the exact error, preferably containing concrete /// Specific information about the exact error, preferably containing concrete
/// values. /// values.
pub message: Arc<String>, pub message: Arc<String>,
/// Specific code fragments that have contributed to the emergence of the /// Specific code fragments that have contributed to the emergence of the
/// error. /// error.
pub locations: Vec<ErrLocation>, pub locations: Vec<ErrLocation>,
} }
/// If this is an [`Err`] then the [`Vec`] must not be empty. /// If this is an [`Err`] then the [`Vec`] must not be empty.

View File

@@ -43,9 +43,9 @@ pub struct Release(pub SysId, pub ExprTicket);
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExprNotif, ExtHostNotif)] #[extends(ExprNotif, ExtHostNotif)]
pub struct Move { pub struct Move {
pub dec: SysId, pub dec: SysId,
pub inc: SysId, pub inc: SysId,
pub expr: ExprTicket, pub expr: ExprTicket,
} }
/// A description of a new expression. It is used as the return value of /// A description of a new expression. It is used as the return value of
@@ -53,48 +53,48 @@ pub struct Move {
/// [crate::tree::Tree]. /// [crate::tree::Tree].
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum ExpressionKind { pub enum ExpressionKind {
/// Apply the lhs as a function to the rhs /// Apply the lhs as a function to the rhs
Call(Box<Expression>, Box<Expression>), Call(Box<Expression>, Box<Expression>),
/// Lambda function. The number operates as an argument name /// Lambda function. The number operates as an argument name
Lambda(u64, Box<Expression>), Lambda(u64, Box<Expression>),
/// Binds the argument passed to the lambda with the same ID in the same /// Binds the argument passed to the lambda with the same ID in the same
/// template /// template
Arg(u64), Arg(u64),
/// Insert the specified host-expression in the template here. When the clause /// Insert the specified host-expression in the template here. When the clause
/// is used in the const tree, this variant is forbidden. /// is used in the const tree, this variant is forbidden.
Slot(ExprTicket), Slot(ExprTicket),
/// The lhs must be fully processed before the rhs can be processed. /// The lhs must be fully processed before the rhs can be processed.
/// Equivalent to Haskell's function of the same name /// Equivalent to Haskell's function of the same name
Seq(Box<Expression>, Box<Expression>), Seq(Box<Expression>, Box<Expression>),
/// Insert a new atom in the tree. When the clause is used in the const tree, /// Insert a new atom in the tree. When the clause is used in the const tree,
/// the atom must be trivial. This is always a newly constructed atom, if you /// the atom must be trivial. This is always a newly constructed atom, if you
/// want to reference an existing atom, use the corresponding [ExprTicket]. /// want to reference an existing atom, use the corresponding [ExprTicket].
/// Because the atom is newly constructed, it also must belong to this system. /// Because the atom is newly constructed, it also must belong to this system.
NewAtom(Atom), NewAtom(Atom),
/// A reference to a constant /// A reference to a constant
Const(TStrv), Const(TStrv),
/// A static runtime error. /// A static runtime error.
Bottom(Vec<OrcError>), Bottom(Vec<OrcError>),
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Expression { pub struct Expression {
pub kind: ExpressionKind, pub kind: ExpressionKind,
pub location: Location, pub location: Location,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum InspectedKind { pub enum InspectedKind {
Atom(Atom), Atom(Atom),
Bottom(Vec<OrcError>), Bottom(Vec<OrcError>),
Opaque, Opaque,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Inspected { pub struct Inspected {
pub kind: InspectedKind, pub kind: InspectedKind,
pub location: Location, pub location: Location,
pub refcount: u32, pub refcount: u32,
} }
/// Obtain information about an expression. Used to act upon arguments by /// Obtain information about an expression. Used to act upon arguments by
@@ -103,24 +103,24 @@ pub struct Inspected {
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExprReq, ExtHostReq)] #[extends(ExprReq, ExtHostReq)]
pub struct Inspect { pub struct Inspect {
pub target: ExprTicket, pub target: ExprTicket,
} }
impl Request for Inspect { impl Request for Inspect {
type Response = Inspected; type Response = Inspected;
} }
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExtHostReq)] #[extends(ExtHostReq)]
#[extendable] #[extendable]
pub enum ExprReq { pub enum ExprReq {
Inspect(Inspect), Inspect(Inspect),
} }
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExtHostNotif)] #[extends(ExtHostNotif)]
#[extendable] #[extendable]
pub enum ExprNotif { pub enum ExprNotif {
Acquire(Acquire), Acquire(Acquire),
Release(Release), Release(Release),
Move(Move), Move(Move),
} }

View File

@@ -12,10 +12,10 @@ use crate::{ExtHostReq, HostExtReq};
#[extends(ExtHostReq)] #[extends(ExtHostReq)]
#[extendable] #[extendable]
pub enum IntReq { pub enum IntReq {
InternStr(InternStr), InternStr(InternStr),
InternStrv(InternStrv), InternStrv(InternStrv),
ExternStr(ExternStr), ExternStr(ExternStr),
ExternStrv(ExternStrv), ExternStrv(ExternStrv),
} }
/// replica -> master to intern a string on the master. Repeatable. /// replica -> master to intern a string on the master. Repeatable.
@@ -25,7 +25,7 @@ pub enum IntReq {
#[extends(IntReq, ExtHostReq)] #[extends(IntReq, ExtHostReq)]
pub struct InternStr(pub Arc<String>); pub struct InternStr(pub Arc<String>);
impl Request for InternStr { impl Request for InternStr {
type Response = TStr; type Response = TStr;
} }
/// replica -> master to find the interned string corresponding to a key. /// replica -> master to find the interned string corresponding to a key.
@@ -37,7 +37,7 @@ impl Request for InternStr {
#[extends(IntReq, ExtHostReq)] #[extends(IntReq, ExtHostReq)]
pub struct ExternStr(pub TStr); pub struct ExternStr(pub TStr);
impl Request for ExternStr { impl Request for ExternStr {
type Response = Arc<String>; type Response = Arc<String>;
} }
/// replica -> master to intern a vector of interned strings /// replica -> master to intern a vector of interned strings
/// ///
@@ -48,7 +48,7 @@ impl Request for ExternStr {
#[extends(IntReq, ExtHostReq)] #[extends(IntReq, ExtHostReq)]
pub struct InternStrv(pub Arc<Vec<TStr>>); pub struct InternStrv(pub Arc<Vec<TStr>>);
impl Request for InternStrv { impl Request for InternStrv {
type Response = TStrv; type Response = TStrv;
} }
/// replica -> master to find the vector of interned strings corresponding to a /// replica -> master to find the vector of interned strings corresponding to a
/// token /// token
@@ -60,7 +60,7 @@ impl Request for InternStrv {
#[extends(IntReq, ExtHostReq)] #[extends(IntReq, ExtHostReq)]
pub struct ExternStrv(pub TStrv); pub struct ExternStrv(pub TStrv);
impl Request for ExternStrv { impl Request for ExternStrv {
type Response = Arc<Vec<TStr>>; type Response = Arc<Vec<TStr>>;
} }
/// A substitute for an interned string in serialized datastructures. /// A substitute for an interned string in serialized datastructures.
@@ -77,13 +77,13 @@ pub struct TStrv(pub NonZeroU64);
#[extends(HostExtReq)] #[extends(HostExtReq)]
pub struct Sweep; pub struct Sweep;
impl Request for Sweep { impl Request for Sweep {
type Response = Retained; type Response = Retained;
} }
/// List of keys in this replica that couldn't be sweeped because local /// List of keys in this replica that couldn't be sweeped because local
/// datastructures reference their value. /// datastructures reference their value.
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Retained { pub struct Retained {
pub strings: Vec<TStr>, pub strings: Vec<TStr>,
pub vecs: Vec<TStrv>, pub vecs: Vec<TStrv>,
} }

View File

@@ -14,33 +14,33 @@ pub struct CharFilter(pub Vec<RangeInclusive<char>>);
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
pub struct LexExpr { pub struct LexExpr {
pub sys: SysId, pub sys: SysId,
pub id: ParsId, pub id: ParsId,
pub text: TStr, pub text: TStr,
pub pos: u32, pub pos: u32,
} }
impl Request for LexExpr { impl Request for LexExpr {
type Response = Option<OrcResult<LexedExpr>>; type Response = Option<OrcResult<LexedExpr>>;
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct LexedExpr { pub struct LexedExpr {
pub pos: u32, pub pos: u32,
pub expr: TokenTree, pub expr: TokenTree,
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)] #[extends(ExtHostReq)]
pub struct SubLex { pub struct SubLex {
pub id: ParsId, pub id: ParsId,
pub pos: u32, pub pos: u32,
} }
impl Request for SubLex { impl Request for SubLex {
type Response = Option<SubLexed>; type Response = Option<SubLexed>;
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct SubLexed { pub struct SubLexed {
pub pos: u32, pub pos: u32,
pub ticket: TreeTicket, pub ticket: TreeTicket,
} }

View File

@@ -6,29 +6,29 @@ use crate::{TStr, TStrv};
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum Location { pub enum Location {
/// Location inaccessible. Locations are always debugging aids and never /// Location inaccessible. Locations are always debugging aids and never
/// mandatory. /// mandatory.
None, None,
/// Associated with a slot when wrapped in an expression. /// Associated with a slot when wrapped in an expression.
SlotTarget, SlotTarget,
/// Used in functions to denote the generated code that carries on the /// Used in functions to denote the generated code that carries on the
/// location of the call. /// location of the call.
Inherit, Inherit,
Gen(CodeGenInfo), Gen(CodeGenInfo),
/// Range and file /// Range and file
SourceRange(SourceRange), SourceRange(SourceRange),
/// Range only, file implied. Most notably used by parsers /// Range only, file implied. Most notably used by parsers
Range(Range<u32>), Range(Range<u32>),
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct SourceRange { pub struct SourceRange {
pub path: TStrv, pub path: TStrv,
pub range: Range<u32>, pub range: Range<u32>,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct CodeGenInfo { pub struct CodeGenInfo {
pub generator: TStrv, pub generator: TStrv,
pub details: TStr, pub details: TStr,
} }

View File

@@ -4,8 +4,8 @@ use crate::ExtHostNotif;
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum LogStrategy { pub enum LogStrategy {
StdErr, StdErr,
File(String), File(String),
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]

View File

@@ -5,79 +5,82 @@ use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request; use orchid_api_traits::Request;
use ordered_float::NotNan; use ordered_float::NotNan;
use crate::{Atom, Comment, ExtHostReq, HostExtReq, Location, OrcResult, Paren, ParsId, SysId, TStr, TStrv}; use crate::{
Atom, Comment, ExtHostReq, HostExtReq, Location, OrcResult, Paren, ParsId, SysId, TStr, TStrv,
};
#[derive(Clone, Copy, Debug, Coding, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Clone, Copy, Debug, Coding, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroTreeId(pub NonZeroU64); pub struct MacroTreeId(pub NonZeroU64);
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct MacroTree { pub struct MacroTree {
pub location: Location, pub location: Location,
pub token: MacroToken, pub token: MacroToken,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum MacroToken { pub enum MacroToken {
S(Paren, Vec<MacroTree>), S(Paren, Vec<MacroTree>),
Name(TStrv), Name(TStrv),
Slot(MacroTreeId), Slot(MacroTreeId),
Lambda(Vec<MacroTree>, Vec<MacroTree>), Lambda(Vec<MacroTree>, Vec<MacroTree>),
Ph(Placeholder), Ph(Placeholder),
Atom(Atom), Atom(Atom),
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct MacroBlock { pub struct MacroBlock {
pub priority: Option<NotNan<f64>>, pub priority: Option<NotNan<f64>>,
pub rules: Vec<MacroRule>, pub rules: Vec<MacroRule>,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct MacroRule { pub struct MacroRule {
pub location: Location, pub location: Location,
pub comments: Vec<Comment>, pub comments: Vec<Comment>,
pub pattern: Vec<MacroTree>, pub pattern: Vec<MacroTree>,
pub id: MacroId, pub id: MacroId,
} }
/// A specific macro rule with a specific pattern across invocations /// A specific macro rule with a specific pattern across invocations
#[derive(Clone, Copy, Debug, Coding, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, Coding, PartialEq, Eq, Hash)]
pub struct MacroId(pub NonZeroU64); pub struct MacroId(pub NonZeroU64);
/// After a pattern matches, this call executes the body of the macro. This request returns None /// After a pattern matches, this call executes the body of the macro. This
/// if an inner nested request raised an exception /// request returns None if an inner nested request raised an exception
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
pub struct ApplyMacro { pub struct ApplyMacro {
pub sys: SysId, pub sys: SysId,
pub id: MacroId, pub id: MacroId,
/// Recursion token /// Recursion token
pub run_id: ParsId, pub run_id: ParsId,
/// Must contain exactly the keys that were specified as placeholders in the pattern /// Must contain exactly the keys that were specified as placeholders in the
pub params: HashMap<TStr, Vec<MacroTree>>, /// pattern
pub params: HashMap<TStr, Vec<MacroTree>>,
} }
impl Request for ApplyMacro { impl Request for ApplyMacro {
type Response = Option<OrcResult<Vec<MacroTree>>>; type Response = Option<OrcResult<Vec<MacroTree>>>;
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)] #[extends(ExtHostReq)]
pub struct RunMacros { pub struct RunMacros {
pub run_id: ParsId, pub run_id: ParsId,
pub query: Vec<MacroTree>, pub query: Vec<MacroTree>,
} }
impl Request for RunMacros { impl Request for RunMacros {
type Response = Option<Vec<MacroTree>>; type Response = Option<Vec<MacroTree>>;
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Placeholder { pub struct Placeholder {
pub name: TStr, pub name: TStr,
pub kind: PhKind, pub kind: PhKind,
} }
#[derive(Clone, Copy, Debug, Coding)] #[derive(Clone, Copy, Debug, Coding)]
pub enum PhKind { pub enum PhKind {
Scalar, Scalar,
Vector { priority: u8, at_least_one: bool }, Vector { priority: u8, at_least_one: bool },
} }

View File

@@ -11,11 +11,11 @@ pub struct ParsId(pub NonZeroU64);
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
pub struct ParseLine { pub struct ParseLine {
pub sys: SysId, pub sys: SysId,
pub comments: Vec<Comment>, pub comments: Vec<Comment>,
pub exported: bool, pub exported: bool,
pub line: Vec<TokenTree>, pub line: Vec<TokenTree>,
} }
impl Request for ParseLine { impl Request for ParseLine {
type Response = OrcResult<Vec<TokenTree>>; type Response = OrcResult<Vec<TokenTree>>;
} }

View File

@@ -25,63 +25,63 @@
use std::io::{Read, Write}; use std::io::{Read, Write};
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::{read_exact, write_exact, Channel, Decode, Encode, MsgSet, Request}; use orchid_api_traits::{Channel, Decode, Encode, MsgSet, Request, read_exact, write_exact};
use crate::{atom, expr, interner, lexer, logging, macros, parser, system, tree, vfs}; use crate::{atom, expr, interner, lexer, logging, macros, parser, system, tree, vfs};
static HOST_INTRO: &[u8] = b"Orchid host, binary API v0\n"; static HOST_INTRO: &[u8] = b"Orchid host, binary API v0\n";
pub struct HostHeader { pub struct HostHeader {
pub log_strategy: logging::LogStrategy, pub log_strategy: logging::LogStrategy,
} }
impl Decode for HostHeader { impl Decode for HostHeader {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
read_exact(read, HOST_INTRO); read_exact(read, HOST_INTRO);
Self { log_strategy: logging::LogStrategy::decode(read) } Self { log_strategy: logging::LogStrategy::decode(read) }
} }
} }
impl Encode for HostHeader { impl Encode for HostHeader {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write_exact(write, HOST_INTRO); write_exact(write, HOST_INTRO);
self.log_strategy.encode(write) self.log_strategy.encode(write)
} }
} }
static EXT_INTRO: &[u8] = b"Orchid extension, binary API v0\n"; static EXT_INTRO: &[u8] = b"Orchid extension, binary API v0\n";
pub struct ExtensionHeader { pub struct ExtensionHeader {
pub name: String, pub name: String,
pub systems: Vec<system::SystemDecl>, pub systems: Vec<system::SystemDecl>,
} }
impl Decode for ExtensionHeader { impl Decode for ExtensionHeader {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
read_exact(read, EXT_INTRO); read_exact(read, EXT_INTRO);
Self { name: String::decode(read), systems: Vec::decode(read) } Self { name: String::decode(read), systems: Vec::decode(read) }
} }
} }
impl Encode for ExtensionHeader { impl Encode for ExtensionHeader {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write_exact(write, EXT_INTRO); write_exact(write, EXT_INTRO);
self.name.encode(write); self.name.encode(write);
self.systems.encode(write) self.systems.encode(write)
} }
} }
#[derive(Clone, Debug, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)] #[derive(Clone, Debug, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct Ping; pub struct Ping;
impl Request for Ping { impl Request for Ping {
type Response = (); type Response = ();
} }
/// Requests running from the extension to the host /// Requests running from the extension to the host
#[derive(Clone, Coding, Hierarchy)] #[derive(Clone, Coding, Hierarchy)]
#[extendable] #[extendable]
pub enum ExtHostReq { pub enum ExtHostReq {
Ping(Ping), Ping(Ping),
IntReq(interner::IntReq), IntReq(interner::IntReq),
Fwd(atom::Fwd), Fwd(atom::Fwd),
SysFwd(system::SysFwd), SysFwd(system::SysFwd),
ExprReq(expr::ExprReq), ExprReq(expr::ExprReq),
SubLex(lexer::SubLex), SubLex(lexer::SubLex),
RunMacros(macros::RunMacros), RunMacros(macros::RunMacros),
} }
/// Notifications sent from the extension to the host /// Notifications sent from the extension to the host
@@ -89,93 +89,93 @@ pub enum ExtHostReq {
#[derive(Debug, Clone, Coding, Hierarchy)] #[derive(Debug, Clone, Coding, Hierarchy)]
#[extendable] #[extendable]
pub enum ExtHostNotif { pub enum ExtHostNotif {
ExprNotif(expr::ExprNotif), ExprNotif(expr::ExprNotif),
Log(logging::Log), Log(logging::Log),
} }
pub struct ExtHostChannel; pub struct ExtHostChannel;
impl Channel for ExtHostChannel { impl Channel for ExtHostChannel {
type Notif = ExtHostNotif; type Notif = ExtHostNotif;
type Req = ExtHostReq; type Req = ExtHostReq;
} }
/// Requests running from the host to the extension /// Requests running from the host to the extension
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable] #[extendable]
pub enum HostExtReq { pub enum HostExtReq {
Ping(Ping), Ping(Ping),
SysReq(system::SysReq), SysReq(system::SysReq),
Sweep(interner::Sweep), Sweep(interner::Sweep),
AtomReq(atom::AtomReq), AtomReq(atom::AtomReq),
DeserAtom(atom::DeserAtom), DeserAtom(atom::DeserAtom),
LexExpr(lexer::LexExpr), LexExpr(lexer::LexExpr),
ParseLine(parser::ParseLine), ParseLine(parser::ParseLine),
GetMember(tree::GetMember), GetMember(tree::GetMember),
VfsReq(vfs::VfsReq), VfsReq(vfs::VfsReq),
ApplyMacro(macros::ApplyMacro), ApplyMacro(macros::ApplyMacro),
} }
/// Notifications sent from the host to the extension /// Notifications sent from the host to the extension
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable] #[extendable]
pub enum HostExtNotif { pub enum HostExtNotif {
SystemDrop(system::SystemDrop), SystemDrop(system::SystemDrop),
AtomDrop(atom::AtomDrop), AtomDrop(atom::AtomDrop),
/// The host can assume that after this notif is sent, a correctly written /// The host can assume that after this notif is sent, a correctly written
/// extension will eventually exit. /// extension will eventually exit.
Exit, Exit,
} }
pub struct HostExtChannel; pub struct HostExtChannel;
impl Channel for HostExtChannel { impl Channel for HostExtChannel {
type Notif = HostExtNotif; type Notif = HostExtNotif;
type Req = HostExtReq; type Req = HostExtReq;
} }
/// Message set viewed from the extension's perspective /// Message set viewed from the extension's perspective
pub struct ExtMsgSet; pub struct ExtMsgSet;
impl MsgSet for ExtMsgSet { impl MsgSet for ExtMsgSet {
type In = HostExtChannel; type In = HostExtChannel;
type Out = ExtHostChannel; type Out = ExtHostChannel;
} }
/// Message Set viewed from the host's perspective /// Message Set viewed from the host's perspective
pub struct HostMsgSet; pub struct HostMsgSet;
impl MsgSet for HostMsgSet { impl MsgSet for HostMsgSet {
type In = ExtHostChannel; type In = ExtHostChannel;
type Out = HostExtChannel; type Out = HostExtChannel;
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use orchid_api_traits::enc_vec; use orchid_api_traits::enc_vec;
use ordered_float::NotNan; use ordered_float::NotNan;
use super::*; use super::*;
#[test] #[test]
fn host_header_enc() { fn host_header_enc() {
let hh = HostHeader { log_strategy: logging::LogStrategy::File("SomeFile".to_string()) }; let hh = HostHeader { log_strategy: logging::LogStrategy::File("SomeFile".to_string()) };
let mut enc = &enc_vec(&hh)[..]; let mut enc = &enc_vec(&hh)[..];
eprintln!("Encoded to {enc:?}"); eprintln!("Encoded to {enc:?}");
HostHeader::decode(&mut enc); HostHeader::decode(&mut enc);
assert_eq!(enc, []); assert_eq!(enc, []);
} }
#[test] #[test]
fn ext_header_enc() { fn ext_header_enc() {
let eh = ExtensionHeader { let eh = ExtensionHeader {
name: "my_extension".to_string(), name: "my_extension".to_string(),
systems: vec![system::SystemDecl { systems: vec![system::SystemDecl {
id: system::SysDeclId(1.try_into().unwrap()), id: system::SysDeclId(1.try_into().unwrap()),
name: "misc".to_string(), name: "misc".to_string(),
depends: vec!["std".to_string()], depends: vec!["std".to_string()],
priority: NotNan::new(1f64).unwrap(), priority: NotNan::new(1f64).unwrap(),
}], }],
}; };
let mut enc = &enc_vec(&eh)[..]; let mut enc = &enc_vec(&eh)[..];
eprintln!("Encoded to {enc:?}"); eprintln!("Encoded to {enc:?}");
ExtensionHeader::decode(&mut enc); ExtensionHeader::decode(&mut enc);
assert_eq!(enc, []) assert_eq!(enc, [])
} }
} }

View File

@@ -19,21 +19,21 @@ pub struct SysId(pub NonZeroU16);
/// extension header, so it cannot rely on the interner. /// extension header, so it cannot rely on the interner.
#[derive(Debug, Clone, Coding)] #[derive(Debug, Clone, Coding)]
pub struct SystemDecl { pub struct SystemDecl {
/// ID of the system, unique within the library /// ID of the system, unique within the library
pub id: SysDeclId, pub id: SysDeclId,
/// This can be depended upon. Exactly one of each kind will be loaded /// This can be depended upon. Exactly one of each kind will be loaded
pub name: String, pub name: String,
/// If multiple instances of a system are found, the highest priority will be /// If multiple instances of a system are found, the highest priority will be
/// used. This can be used for version counting, but also for fallbacks if a /// used. This can be used for version counting, but also for fallbacks if a
/// negative number is found. /// negative number is found.
/// ///
/// Systems cannot depend on specific versions and older versions of systems /// Systems cannot depend on specific versions and older versions of systems
/// are never loaded. Compatibility can be determined on a per-system basis /// are never loaded. Compatibility can be determined on a per-system basis
/// through an algorithm chosen by the provider. /// through an algorithm chosen by the provider.
pub priority: NotNan<f64>, pub priority: NotNan<f64>,
/// List of systems needed for this one to work correctly. These will be /// List of systems needed for this one to work correctly. These will be
/// looked up, and an error produced if they aren't found. /// looked up, and an error produced if they aren't found.
pub depends: Vec<String>, pub depends: Vec<String>,
} }
/// Host -> extension; instantiate a system according to its [SystemDecl]. /// Host -> extension; instantiate a system according to its [SystemDecl].
@@ -43,26 +43,26 @@ pub struct SystemDecl {
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(SysReq, HostExtReq)] #[extends(SysReq, HostExtReq)]
pub struct NewSystem { pub struct NewSystem {
/// ID of the system /// ID of the system
pub system: SysDeclId, pub system: SysDeclId,
/// ID of the system instance, unique for the host /// ID of the system instance, unique for the host
pub id: SysId, pub id: SysId,
/// Instance IDs for dependencies, in the order that the names appear in the /// Instance IDs for dependencies, in the order that the names appear in the
/// declaration /// declaration
pub depends: Vec<SysId>, pub depends: Vec<SysId>,
} }
impl Request for NewSystem { impl Request for NewSystem {
type Response = SystemInst; type Response = SystemInst;
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct SystemInst { pub struct SystemInst {
/// The set of possible starting characters of tokens the lexer of this system /// The set of possible starting characters of tokens the lexer of this system
/// can process. The lexer will notify this system if it encounters one of /// can process. The lexer will notify this system if it encounters one of
/// these characters.9 /// these characters.9
pub lex_filter: CharFilter, pub lex_filter: CharFilter,
pub line_types: Vec<TStr>, pub line_types: Vec<TStr>,
pub const_root: HashMap<TStr, MemberKind>, pub const_root: HashMap<TStr, MemberKind>,
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
@@ -73,20 +73,20 @@ pub struct SystemDrop(pub SysId);
#[extends(SysReq, HostExtReq)] #[extends(SysReq, HostExtReq)]
pub struct SysFwded(pub SysId, pub Vec<u8>); pub struct SysFwded(pub SysId, pub Vec<u8>);
impl Request for SysFwded { impl Request for SysFwded {
type Response = Vec<u8>; type Response = Vec<u8>;
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)] #[extends(ExtHostReq)]
pub struct SysFwd(pub SysId, pub Vec<u8>); pub struct SysFwd(pub SysId, pub Vec<u8>);
impl Request for SysFwd { impl Request for SysFwd {
type Response = Vec<u8>; type Response = Vec<u8>;
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
#[extendable] #[extendable]
pub enum SysReq { pub enum SysReq {
NewSystem(NewSystem), NewSystem(NewSystem),
SysFwded(SysFwded), SysFwded(SysFwded),
} }

View File

@@ -7,7 +7,7 @@ use orchid_api_traits::Request;
use ordered_float::NotNan; use ordered_float::NotNan;
use crate::{ use crate::{
Atom, Expression, HostExtReq, Location, MacroBlock, OrcError, Placeholder, SysId, TStr, TStrv, Atom, Expression, HostExtReq, Location, MacroBlock, OrcError, Placeholder, SysId, TStr, TStrv,
}; };
/// A token tree from a lexer recursion request. Its lifetime is the lex call, /// A token tree from a lexer recursion request. Its lifetime is the lex call,
@@ -22,42 +22,42 @@ pub struct TreeTicket(pub NonZeroU64);
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct TokenTree { pub struct TokenTree {
pub token: Token, pub token: Token,
pub range: Range<u32>, pub range: Range<u32>,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum Token { pub enum Token {
/// Lambda function head, from the opening \ until the beginning of the body. /// Lambda function head, from the opening \ until the beginning of the body.
LambdaHead(Vec<TokenTree>), LambdaHead(Vec<TokenTree>),
/// A name segment or an operator. /// A name segment or an operator.
Name(TStr), Name(TStr),
/// :: /// ::
NS, NS,
/// Line break. /// Line break.
BR, BR,
/// ( Round parens ), [ Square brackets ] or { Curly braces } /// ( Round parens ), [ Square brackets ] or { Curly braces }
S(Paren, Vec<TokenTree>), S(Paren, Vec<TokenTree>),
/// A new atom /// A new atom
Atom(Atom), Atom(Atom),
/// Anchor to insert a subtree /// Anchor to insert a subtree
Slot(TreeTicket), Slot(TreeTicket),
/// A static compile-time error returned by failing lexers if /// A static compile-time error returned by failing lexers if
/// the rest of the source is likely still meaningful /// the rest of the source is likely still meaningful
Bottom(Vec<OrcError>), Bottom(Vec<OrcError>),
/// A comment /// A comment
Comment(Arc<String>), Comment(Arc<String>),
/// Placeholder /// Placeholder
Ph(Placeholder), Ph(Placeholder),
/// Macro block head /// Macro block head
Macro(Option<NotNan<f64>>), Macro(Option<NotNan<f64>>),
} }
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Coding)] #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Coding)]
pub enum Paren { pub enum Paren {
Round, Round,
Square, Square,
Curly, Curly,
} }
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)] #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
@@ -65,46 +65,46 @@ pub struct TreeId(pub NonZeroU64);
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Item { pub struct Item {
pub location: Location, pub location: Location,
pub comments: Vec<Comment>, pub comments: Vec<Comment>,
pub kind: ItemKind, pub kind: ItemKind,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum ItemKind { pub enum ItemKind {
Member(Member), Member(Member),
Macro(MacroBlock), Macro(MacroBlock),
Export(TStr), Export(TStr),
Import(TStrv), Import(TStrv),
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Comment { pub struct Comment {
pub text: TStr, pub text: TStr,
pub location: Location, pub location: Location,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Member { pub struct Member {
pub name: TStr, pub name: TStr,
pub kind: MemberKind, pub kind: MemberKind,
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum MemberKind { pub enum MemberKind {
Const(Expression), Const(Expression),
Module(Module), Module(Module),
Lazy(TreeId), Lazy(TreeId),
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Module { pub struct Module {
pub items: Vec<Item>, pub items: Vec<Item>,
} }
#[derive(Clone, Copy, Debug, Coding, Hierarchy)] #[derive(Clone, Copy, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
pub struct GetMember(pub SysId, pub TreeId); pub struct GetMember(pub SysId, pub TreeId);
impl Request for GetMember { impl Request for GetMember {
type Response = MemberKind; type Response = MemberKind;
} }

View File

@@ -14,34 +14,34 @@ pub struct VfsId(pub NonZeroU16);
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum Loaded { pub enum Loaded {
Code(String), Code(String),
Collection(Vec<TStr>), Collection(Vec<TStr>),
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(VfsReq, HostExtReq)] #[extends(VfsReq, HostExtReq)]
pub struct VfsRead(pub SysId, pub VfsId, pub Vec<TStr>); pub struct VfsRead(pub SysId, pub VfsId, pub Vec<TStr>);
impl Request for VfsRead { impl Request for VfsRead {
type Response = OrcResult<Loaded>; type Response = OrcResult<Loaded>;
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum EagerVfs { pub enum EagerVfs {
Lazy(VfsId), Lazy(VfsId),
Eager(HashMap<TStr, EagerVfs>), Eager(HashMap<TStr, EagerVfs>),
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(VfsReq, HostExtReq)] #[extends(VfsReq, HostExtReq)]
pub struct GetVfs(pub SysId); pub struct GetVfs(pub SysId);
impl Request for GetVfs { impl Request for GetVfs {
type Response = EagerVfs; type Response = EagerVfs;
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
#[extendable] #[extendable]
pub enum VfsReq { pub enum VfsReq {
GetVfs(GetVfs), GetVfs(GetVfs),
VfsRead(VfsRead), VfsRead(VfsRead),
} }

View File

@@ -10,14 +10,14 @@ derive_destructure = "1.0.0"
dyn-clone = "1.0.17" dyn-clone = "1.0.17"
hashbrown = "0.15.2" hashbrown = "0.15.2"
itertools = "0.14.0" itertools = "0.14.0"
lazy_static = "1.4.0" lazy_static = "1.5.0"
never = "0.1.0" never = "0.1.0"
num-traits = "0.2.19" num-traits = "0.2.19"
orchid-api = { version = "0.1.0", path = "../orchid-api" } orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
ordered-float = "4.2.0" ordered-float = "4.6.0"
rust-embed = "8.3.0" rust-embed = "8.5.0"
rust_decimal = "1.35.0" rust_decimal = "1.36.0"
substack = "1.1.0" substack = "1.1.1"
trait-set = "0.3.0" trait-set = "0.3.0"

View File

@@ -3,27 +3,27 @@ use std::ops::Deref;
use std::sync::Arc; use std::sync::Arc;
pub enum ArcCow<'a, T: ?Sized + ToOwned> { pub enum ArcCow<'a, T: ?Sized + ToOwned> {
Borrowed(&'a T), Borrowed(&'a T),
Owned(Arc<T::Owned>), Owned(Arc<T::Owned>),
} }
impl<T: ?Sized + ToOwned> ArcCow<'_, T> { impl<T: ?Sized + ToOwned> ArcCow<'_, T> {
pub fn owned(value: T::Owned) -> Self { Self::Owned(Arc::new(value)) } pub fn owned(value: T::Owned) -> Self { Self::Owned(Arc::new(value)) }
} }
impl<T: ?Sized + ToOwned> Clone for ArcCow<'_, T> { impl<T: ?Sized + ToOwned> Clone for ArcCow<'_, T> {
fn clone(&self) -> Self { fn clone(&self) -> Self {
match self { match self {
Self::Borrowed(r) => Self::Borrowed(r), Self::Borrowed(r) => Self::Borrowed(r),
Self::Owned(b) => Self::Owned(b.clone()), Self::Owned(b) => Self::Owned(b.clone()),
} }
} }
} }
impl<T: ?Sized + ToOwned> Deref for ArcCow<'_, T> { impl<T: ?Sized + ToOwned> Deref for ArcCow<'_, T> {
type Target = T; type Target = T;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
match self { match self {
Self::Borrowed(t) => t, Self::Borrowed(t) => t,
Self::Owned(b) => b.as_ref().borrow(), Self::Owned(b) => b.as_ref().borrow(),
} }
} }
} }

View File

@@ -8,54 +8,54 @@ use crate::api;
pub type CRange = RangeInclusive<char>; pub type CRange = RangeInclusive<char>;
pub trait ICFilter: fmt::Debug { pub trait ICFilter: fmt::Debug {
fn ranges(&self) -> &[RangeInclusive<char>]; fn ranges(&self) -> &[RangeInclusive<char>];
} }
impl ICFilter for [RangeInclusive<char>] { impl ICFilter for [RangeInclusive<char>] {
fn ranges(&self) -> &[RangeInclusive<char>] { self } fn ranges(&self) -> &[RangeInclusive<char>] { self }
} }
impl ICFilter for api::CharFilter { impl ICFilter for api::CharFilter {
fn ranges(&self) -> &[RangeInclusive<char>] { &self.0 } fn ranges(&self) -> &[RangeInclusive<char>] { &self.0 }
} }
fn try_merge_char_ranges(left: CRange, right: CRange) -> Result<CRange, (CRange, CRange)> { fn try_merge_char_ranges(left: CRange, right: CRange) -> Result<CRange, (CRange, CRange)> {
match *left.end() as u32 + 1 < *right.start() as u32 { match *left.end() as u32 + 1 < *right.start() as u32 {
true => Err((left, right)), true => Err((left, right)),
false => Ok(*left.start()..=*right.end()), false => Ok(*left.start()..=*right.end()),
} }
} }
/// Process the character ranges to make them adhere to the structural /// Process the character ranges to make them adhere to the structural
/// requirements of [CharFilter] /// requirements of [CharFilter]
pub fn mk_char_filter(items: impl IntoIterator<Item = CRange>) -> api::CharFilter { pub fn mk_char_filter(items: impl IntoIterator<Item = CRange>) -> api::CharFilter {
api::CharFilter( api::CharFilter(
(items.into_iter()) (items.into_iter())
.filter(|r| *r.start() as u32 <= *r.end() as u32) .filter(|r| *r.start() as u32 <= *r.end() as u32)
.sorted_by_key(|r| *r.start() as u32) .sorted_by_key(|r| *r.start() as u32)
.coalesce(try_merge_char_ranges) .coalesce(try_merge_char_ranges)
.collect_vec(), .collect_vec(),
) )
} }
/// Decide whether a char filter matches a character via binary search /// Decide whether a char filter matches a character via binary search
pub fn char_filter_match(cf: &(impl ICFilter + ?Sized), c: char) -> bool { pub fn char_filter_match(cf: &(impl ICFilter + ?Sized), c: char) -> bool {
match cf.ranges().binary_search_by_key(&c, |l| *l.end()) { match cf.ranges().binary_search_by_key(&c, |l| *l.end()) {
Ok(_) => true, // c is the end of a range Ok(_) => true, // c is the end of a range
Err(i) if i == cf.ranges().len() => false, // all ranges end before c Err(i) if i == cf.ranges().len() => false, // all ranges end before c
Err(i) => cf.ranges()[i].contains(&c), /* c between cf.0[i-1]?.end and cf.0[i].end, Err(i) => cf.ranges()[i].contains(&c), /* c between cf.0[i-1]?.end and cf.0[i].end,
* check [i] */ * check [i] */
} }
} }
/// Merge two char filters into a filter that matches if either of the /// Merge two char filters into a filter that matches if either of the
/// constituents would match. /// constituents would match.
pub fn char_filter_union( pub fn char_filter_union(
l: &(impl ICFilter + ?Sized), l: &(impl ICFilter + ?Sized),
r: &(impl ICFilter + ?Sized), r: &(impl ICFilter + ?Sized),
) -> api::CharFilter { ) -> api::CharFilter {
api::CharFilter( api::CharFilter(
(l.ranges().iter().merge_by(r.ranges(), |l, r| l.start() <= r.start())) (l.ranges().iter().merge_by(r.ranges(), |l, r| l.start() <= r.start()))
.cloned() .cloned()
.coalesce(try_merge_char_ranges) .coalesce(try_merge_char_ranges)
.collect_vec(), .collect_vec(),
) )
} }

View File

@@ -6,19 +6,19 @@ use never::Never;
/// variety of types for different purposes. Very broadly, if the operation /// variety of types for different purposes. Very broadly, if the operation
/// succeeds, the result should represent _both_ inputs. /// succeeds, the result should represent _both_ inputs.
pub trait Combine: Sized { pub trait Combine: Sized {
/// Information about the failure /// Information about the failure
type Error; type Error;
/// Merge two values into a value that represents both, if this is possible. /// Merge two values into a value that represents both, if this is possible.
fn combine(self, other: Self) -> Result<Self, Self::Error>; fn combine(self, other: Self) -> Result<Self, Self::Error>;
} }
impl Combine for Never { impl Combine for Never {
type Error = Never; type Error = Never;
fn combine(self, _: Self) -> Result<Self, Self::Error> { match self {} } fn combine(self, _: Self) -> Result<Self, Self::Error> { match self {} }
} }
impl Combine for () { impl Combine for () {
type Error = Never; type Error = Never;
fn combine(self, (): Self) -> Result<Self, Self::Error> { Ok(()) } fn combine(self, (): Self) -> Result<Self, Self::Error> { Ok(()) }
} }

View File

@@ -12,158 +12,158 @@ use crate::location::Pos;
/// processing got stuck, a command that is likely to be incorrect /// processing got stuck, a command that is likely to be incorrect
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct ErrPos { pub struct ErrPos {
/// The suspected origin /// The suspected origin
pub position: Pos, pub position: Pos,
/// Any information about the role of this origin /// Any information about the role of this origin
pub message: Option<Arc<String>>, pub message: Option<Arc<String>>,
} }
impl ErrPos { impl ErrPos {
pub fn new(msg: &str, position: Pos) -> Self { pub fn new(msg: &str, position: Pos) -> Self {
Self { message: Some(Arc::new(msg.to_string())), position } Self { message: Some(Arc::new(msg.to_string())), position }
} }
fn from_api(api: &api::ErrLocation) -> Self { fn from_api(api: &api::ErrLocation) -> Self {
Self { Self {
message: Some(api.message.clone()).filter(|s| !s.is_empty()), message: Some(api.message.clone()).filter(|s| !s.is_empty()),
position: Pos::from_api(&api.location), position: Pos::from_api(&api.location),
} }
} }
fn to_api(&self) -> api::ErrLocation { fn to_api(&self) -> api::ErrLocation {
api::ErrLocation { api::ErrLocation {
message: self.message.clone().unwrap_or_default(), message: self.message.clone().unwrap_or_default(),
location: self.position.to_api(), location: self.position.to_api(),
} }
} }
} }
impl From<Pos> for ErrPos { impl From<Pos> for ErrPos {
fn from(origin: Pos) -> Self { Self { position: origin, message: None } } fn from(origin: Pos) -> Self { Self { position: origin, message: None } }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct OrcErr { pub struct OrcErr {
pub description: Tok<String>, pub description: Tok<String>,
pub message: Arc<String>, pub message: Arc<String>,
pub positions: Vec<ErrPos>, pub positions: Vec<ErrPos>,
} }
impl OrcErr { impl OrcErr {
fn to_api(&self) -> api::OrcError { fn to_api(&self) -> api::OrcError {
api::OrcError { api::OrcError {
description: self.description.to_api(), description: self.description.to_api(),
message: self.message.clone(), message: self.message.clone(),
locations: self.positions.iter().map(ErrPos::to_api).collect(), locations: self.positions.iter().map(ErrPos::to_api).collect(),
} }
} }
fn from_api(api: &api::OrcError) -> Self { fn from_api(api: &api::OrcError) -> Self {
Self { Self {
description: Tok::from_api(api.description), description: Tok::from_api(api.description),
message: api.message.clone(), message: api.message.clone(),
positions: api.locations.iter().map(ErrPos::from_api).collect(), positions: api.locations.iter().map(ErrPos::from_api).collect(),
} }
} }
} }
impl Eq for OrcErr {} impl Eq for OrcErr {}
impl PartialEq for OrcErr { impl PartialEq for OrcErr {
fn eq(&self, other: &Self) -> bool { self.description == other.description } fn eq(&self, other: &Self) -> bool { self.description == other.description }
} }
impl From<OrcErr> for Vec<OrcErr> { impl From<OrcErr> for Vec<OrcErr> {
fn from(value: OrcErr) -> Self { vec![value] } fn from(value: OrcErr) -> Self { vec![value] }
} }
impl fmt::Display for OrcErr { impl fmt::Display for OrcErr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let pstr = self.positions.iter().map(|p| format!("{p:?}")).join("; "); let pstr = self.positions.iter().map(|p| format!("{p:?}")).join("; ");
write!(f, "{}: {} @ {}", self.description, self.message, pstr) write!(f, "{}: {} @ {}", self.description, self.message, pstr)
} }
} }
/// Error returned by [OrcErrv::new] when the input iterator yields nothing;
/// an [OrcErrv] must always hold at least one error.
#[derive(Clone, Debug)]
pub struct EmptyErrv;
impl fmt::Display for EmptyErrv {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "OrcErrv must not be empty")
  }
}
// Lets EmptyErrv participate in `?` conversions and `Box<dyn Error>` contexts.
impl std::error::Error for EmptyErrv {}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct OrcErrv(Vec<OrcErr>); pub struct OrcErrv(Vec<OrcErr>);
impl OrcErrv { impl OrcErrv {
pub fn new(errors: impl IntoIterator<Item = OrcErr>) -> Result<Self, EmptyErrv> { pub fn new(errors: impl IntoIterator<Item = OrcErr>) -> Result<Self, EmptyErrv> {
let v = errors.into_iter().collect_vec(); let v = errors.into_iter().collect_vec();
if v.is_empty() { Err(EmptyErrv) } else { Ok(Self(v)) } if v.is_empty() { Err(EmptyErrv) } else { Ok(Self(v)) }
} }
#[must_use] #[must_use]
pub fn extended<T>(mut self, errors: impl IntoIterator<Item = T>) -> Self pub fn extended<T>(mut self, errors: impl IntoIterator<Item = T>) -> Self
where Self: Extend<T> { where Self: Extend<T> {
self.extend(errors); self.extend(errors);
self self
} }
#[must_use] #[must_use]
pub fn len(&self) -> usize { self.0.len() } pub fn len(&self) -> usize { self.0.len() }
#[must_use] #[must_use]
pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn is_empty(&self) -> bool { self.len() == 0 }
#[must_use] #[must_use]
pub fn any(&self, f: impl FnMut(&OrcErr) -> bool) -> bool { self.0.iter().any(f) } pub fn any(&self, f: impl FnMut(&OrcErr) -> bool) -> bool { self.0.iter().any(f) }
#[must_use] #[must_use]
pub fn keep_only(self, f: impl FnMut(&OrcErr) -> bool) -> Option<Self> { pub fn keep_only(self, f: impl FnMut(&OrcErr) -> bool) -> Option<Self> {
let v = self.0.into_iter().filter(f).collect_vec(); let v = self.0.into_iter().filter(f).collect_vec();
if v.is_empty() { None } else { Some(Self(v)) } if v.is_empty() { None } else { Some(Self(v)) }
} }
#[must_use] #[must_use]
pub fn one(&self) -> Option<&OrcErr> { (self.0.len() == 1).then(|| &self.0[9]) } pub fn one(&self) -> Option<&OrcErr> { (self.0.len() == 1).then(|| &self.0[9]) }
pub fn pos_iter(&self) -> impl Iterator<Item = ErrPos> + '_ { pub fn pos_iter(&self) -> impl Iterator<Item = ErrPos> + '_ {
self.0.iter().flat_map(|e| e.positions.iter().cloned()) self.0.iter().flat_map(|e| e.positions.iter().cloned())
} }
pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect() } pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect() }
pub fn from_api<'a>(api: impl IntoIterator<Item = &'a api::OrcError>) -> Self { pub fn from_api<'a>(api: impl IntoIterator<Item = &'a api::OrcError>) -> Self {
Self(api.into_iter().map(OrcErr::from_api).collect()) Self(api.into_iter().map(OrcErr::from_api).collect())
} }
} }
impl From<OrcErr> for OrcErrv { impl From<OrcErr> for OrcErrv {
fn from(value: OrcErr) -> Self { Self(vec![value]) } fn from(value: OrcErr) -> Self { Self(vec![value]) }
} }
impl Add for OrcErrv { impl Add for OrcErrv {
type Output = Self; type Output = Self;
fn add(self, rhs: Self) -> Self::Output { Self(self.0.into_iter().chain(rhs.0).collect_vec()) } fn add(self, rhs: Self) -> Self::Output { Self(self.0.into_iter().chain(rhs.0).collect_vec()) }
} }
impl Extend<OrcErr> for OrcErrv { impl Extend<OrcErr> for OrcErrv {
fn extend<T: IntoIterator<Item = OrcErr>>(&mut self, iter: T) { self.0.extend(iter) } fn extend<T: IntoIterator<Item = OrcErr>>(&mut self, iter: T) { self.0.extend(iter) }
} }
impl Extend<OrcErrv> for OrcErrv { impl Extend<OrcErrv> for OrcErrv {
fn extend<T: IntoIterator<Item = OrcErrv>>(&mut self, iter: T) { fn extend<T: IntoIterator<Item = OrcErrv>>(&mut self, iter: T) {
self.0.extend(iter.into_iter().flatten()) self.0.extend(iter.into_iter().flatten())
} }
} }
impl IntoIterator for OrcErrv { impl IntoIterator for OrcErrv {
type IntoIter = std::vec::IntoIter<OrcErr>; type IntoIter = std::vec::IntoIter<OrcErr>;
type Item = OrcErr; type Item = OrcErr;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
} }
impl fmt::Display for OrcErrv { impl fmt::Display for OrcErrv {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.iter().join("\n")) write!(f, "{}", self.0.iter().join("\n"))
} }
} }
pub type OrcRes<T> = Result<T, OrcErrv>; pub type OrcRes<T> = Result<T, OrcErrv>;
pub fn mk_err( pub fn mk_err(
description: Tok<String>, description: Tok<String>,
message: impl AsRef<str>, message: impl AsRef<str>,
posv: impl IntoIterator<Item = ErrPos>, posv: impl IntoIterator<Item = ErrPos>,
) -> OrcErr { ) -> OrcErr {
OrcErr { OrcErr {
description, description,
message: Arc::new(message.as_ref().to_string()), message: Arc::new(message.as_ref().to_string()),
positions: posv.into_iter().collect(), positions: posv.into_iter().collect(),
} }
} }
pub fn mk_errv( pub fn mk_errv(
description: Tok<String>, description: Tok<String>,
message: impl AsRef<str>, message: impl AsRef<str>,
posv: impl IntoIterator<Item = ErrPos>, posv: impl IntoIterator<Item = ErrPos>,
) -> OrcErrv { ) -> OrcErrv {
mk_err(description, message, posv).into() mk_err(description, message, posv).into()
} }
pub trait Reporter { pub trait Reporter {
fn report(&self, e: impl Into<OrcErrv>); fn report(&self, e: impl Into<OrcErrv>);
} }

View File

@@ -1,67 +1,67 @@
//! Multiple-listener-single-delivery event system. //! Multiple-listener-single-delivery event system.
use std::mem; use std::mem;
use std::sync::mpsc::{self, sync_channel};
use std::sync::Mutex; use std::sync::Mutex;
use std::sync::mpsc::{self, sync_channel};
/// A listener's answer to one offered event.
struct Reply<T, U> {
  // Whether the listener wants to stay subscribed after this event.
  resub: bool,
  // Ok(answer) consumes the event; Err(event) hands it back for the next
  // listener in line.
  outcome: Result<U, T>,
}
/// One subscribed listener: a channel to push events into and a channel to
/// read the listener's [Reply] back from.
struct Listener<T, E> {
  sink: mpsc::SyncSender<T>,
  source: mpsc::Receiver<Reply<T, E>>,
}
/// Multiple-listener, single-delivery event bus: each dispatched event is
/// offered to subscribed listeners in order until one consumes it.
pub struct Event<T, U> {
  listeners: Mutex<Vec<Listener<T, U>>>,
}
impl<T, U> Event<T, U> {
  /// Create an event with no listeners. Const so it can back a `static`.
  pub const fn new() -> Self { Self { listeners: Mutex::new(Vec::new()) } }
  /// Offer `ev` to each listener in subscription order. The first listener
  /// replying `Ok` consumes the event and its answer is returned; `None` if
  /// every listener declined.
  pub fn dispatch(&self, mut ev: T) -> Option<U> {
    let mut listeners = self.listeners.lock().unwrap();
    // Swap the listener list out; listeners that re-subscribe are pushed
    // back into `listeners` as we walk the old list.
    let mut alt_list = Vec::with_capacity(listeners.len());
    mem::swap(&mut *listeners, &mut alt_list);
    let mut items = alt_list.into_iter();
    // `while let` rather than `for`: the tail of `items` must remain
    // available for `extend` once some listener consumes the event.
    while let Some(l) = items.next() {
      l.sink.send(ev).unwrap();
      let Reply { resub, outcome } = l.source.recv().unwrap();
      if resub {
        listeners.push(l);
      }
      match outcome {
        Ok(res) => {
          // Consumed: keep the not-yet-polled listeners subscribed.
          listeners.extend(items);
          return Some(res);
        },
        Err(next) => {
          // Declined: the listener handed the event back.
          ev = next;
        },
      }
    }
    None
  }
  /// Block until an event matching `filter` arrives, consume it with `f`,
  /// and return the second component of `f`'s result. The first component
  /// is delivered to the dispatcher as the event's answer.
  pub fn get_one<V>(&self, mut filter: impl FnMut(&T) -> bool, f: impl FnOnce(T) -> (U, V)) -> V {
    let mut listeners = self.listeners.lock().unwrap();
    // Rendezvous channels (capacity 0) keep dispatcher and listener in
    // lock-step for every offered event.
    let (sink, request) = sync_channel(0);
    let (response, source) = sync_channel(0);
    listeners.push(Listener { sink, source });
    // Drop the lock before blocking, otherwise dispatch could never run.
    mem::drop(listeners);
    loop {
      let t = request.recv().unwrap();
      if filter(&t) {
        let (u, v) = f(t);
        response.send(Reply { resub: false, outcome: Ok(u) }).unwrap();
        return v;
      }
      // Not our event: hand it back and stay subscribed.
      response.send(Reply { resub: true, outcome: Err(t) }).unwrap();
    }
  }
}
impl<T, U> Default for Event<T, U> {
  /// Equivalent to [Event::new].
  fn default() -> Self { Self::new() }
}

View File

@@ -6,45 +6,45 @@ use std::sync::{Mutex, MutexGuard, OnceLock};
use hashbrown::HashMap; use hashbrown::HashMap;
pub struct IdStore<T> { pub struct IdStore<T> {
table: OnceLock<Mutex<HashMap<NonZeroU64, T>>>, table: OnceLock<Mutex<HashMap<NonZeroU64, T>>>,
id: AtomicU64, id: AtomicU64,
} }
impl<T> IdStore<T> { impl<T> IdStore<T> {
pub const fn new() -> Self { Self { table: OnceLock::new(), id: AtomicU64::new(1) } } pub const fn new() -> Self { Self { table: OnceLock::new(), id: AtomicU64::new(1) } }
pub fn add(&self, t: T) -> IdRecord<'_, T> { pub fn add(&self, t: T) -> IdRecord<'_, T> {
let tbl = self.table.get_or_init(Mutex::default); let tbl = self.table.get_or_init(Mutex::default);
let mut tbl_g = tbl.lock().unwrap(); let mut tbl_g = tbl.lock().unwrap();
let id: NonZeroU64 = self.id.fetch_add(1, Ordering::Relaxed).try_into().unwrap(); let id: NonZeroU64 = self.id.fetch_add(1, Ordering::Relaxed).try_into().unwrap();
assert!(tbl_g.insert(id, t).is_none(), "atom ID wraparound"); assert!(tbl_g.insert(id, t).is_none(), "atom ID wraparound");
IdRecord(id, tbl_g) IdRecord(id, tbl_g)
} }
pub fn get(&self, id: impl Into<NonZeroU64>) -> Option<IdRecord<'_, T>> { pub fn get(&self, id: impl Into<NonZeroU64>) -> Option<IdRecord<'_, T>> {
let tbl = self.table.get_or_init(Mutex::default); let tbl = self.table.get_or_init(Mutex::default);
let tbl_g = tbl.lock().unwrap(); let tbl_g = tbl.lock().unwrap();
let id64 = id.into(); let id64 = id.into();
if tbl_g.contains_key(&id64) { Some(IdRecord(id64, tbl_g)) } else { None } if tbl_g.contains_key(&id64) { Some(IdRecord(id64, tbl_g)) } else { None }
} }
pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn is_empty(&self) -> bool { self.len() == 0 }
pub fn len(&self) -> usize { self.table.get().map(|t| t.lock().unwrap().len()).unwrap_or(0) } pub fn len(&self) -> usize { self.table.get().map(|t| t.lock().unwrap().len()).unwrap_or(0) }
} }
impl<T> Default for IdStore<T> { impl<T> Default for IdStore<T> {
fn default() -> Self { Self::new() } fn default() -> Self { Self::new() }
} }
/// Guard over one entry of an [IdStore]: holds the table lock for as long as
/// it lives and dereferences to the stored value.
pub struct IdRecord<'a, T>(NonZeroU64, MutexGuard<'a, HashMap<NonZeroU64, T>>);
impl<T> IdRecord<'_, T> {
  /// The id this record addresses.
  pub fn id(&self) -> NonZeroU64 { self.0 }
  /// Delete the entry from the store and return the owned value.
  pub fn remove(mut self) -> T { self.1.remove(&self.0).unwrap() }
}
impl<T> Deref for IdRecord<'_, T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    self.1.get(&self.0).expect("Existence checked on construction")
  }
}
impl<T> DerefMut for IdRecord<'_, T> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    self.1.get_mut(&self.0).expect("Existence checked on construction")
  }
}

View File

@@ -2,7 +2,7 @@ use std::borrow::Borrow;
use std::hash::BuildHasher as _; use std::hash::BuildHasher as _;
use std::num::NonZeroU64; use std::num::NonZeroU64;
use std::ops::{Deref, DerefMut}; use std::ops::{Deref, DerefMut};
use std::sync::{atomic, Arc, Mutex, MutexGuard}; use std::sync::{Arc, Mutex, MutexGuard, atomic};
use std::{fmt, hash, mem}; use std::{fmt, hash, mem};
use hashbrown::{HashMap, HashSet}; use hashbrown::{HashMap, HashSet};
@@ -19,148 +19,149 @@ struct ForceSized<T>(T);
/// A handle to an interned value: shared storage plus the wire-format marker
/// the interner assigned to it. Cloning only bumps the [Arc].
#[derive(Clone)]
pub struct Tok<T: Interned> {
  data: Arc<T>,
  // Wrapped in ForceSized (declared above; NOTE(review): appears to be a
  // plain newtype — confirm its purpose at the declaration site).
  marker: ForceSized<T::Marker>,
}
impl<T: Interned> Tok<T> {
  /// Pair already-resolved data with its marker.
  pub fn new(data: Arc<T>, marker: T::Marker) -> Self { Self { data, marker: ForceSized(marker) } }
  /// The wire-format marker identifying this token.
  pub fn to_api(&self) -> T::Marker { self.marker.0 }
  /// Resolve a marker into a token via the global interner.
  pub fn from_api<M>(marker: M) -> Self
  where M: InternMarker<Interned = T> {
    deintern(marker)
  }
  /// Clone out the shared data handle.
  pub fn arc(&self) -> Arc<T> { self.data.clone() }
}
impl<T: Interned> Deref for Tok<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target { self.data.as_ref() }
}
// Ord/Eq/Hash below all delegate to the marker id rather than the value;
// this presumes the interner never maps one marker to two distinct values
// (NOTE(review): invariant enforced at the interner level — confirm).
impl<T: Interned> Ord for Tok<T> {
  fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.to_api().cmp(&other.to_api()) }
}
impl<T: Interned> PartialOrd for Tok<T> {
  fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) }
}
impl<T: Interned> Eq for Tok<T> {}
impl<T: Interned> PartialEq for Tok<T> {
  fn eq(&self, other: &Self) -> bool { self.cmp(other).is_eq() }
}
impl<T: Interned> hash::Hash for Tok<T> {
  fn hash<H: hash::Hasher>(&self, state: &mut H) { self.to_api().hash(state) }
}
impl<T: Interned + fmt::Display> fmt::Display for Tok<T> {
  // Displays the interned value itself, not the marker.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(f, "{}", &*self.data)
  }
}
impl<T: Interned + fmt::Debug> fmt::Debug for Tok<T> {
  // Shows both the marker id and the value for diagnostics.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "Token({} -> {:?})", self.to_api().get_id(), self.data.as_ref())
  }
}
// Encoding writes the value and decoding re-interns it, so marker ids need
// not agree across processes.
impl<T: Interned + Encode> Encode for Tok<T> {
  fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) { self.data.encode(write) }
}
impl<T: Interned + Decode> Decode for Tok<T> {
  fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self { intern(&T::decode(read)) }
}
/// A type whose values can be stored in the interner, paired with the
/// wire-format marker type that identifies them.
pub trait Interned: Eq + hash::Hash + Clone + fmt::Debug + Internable<Interned = Self> {
  type Marker: InternMarker<Interned = Self> + Sized;
  /// Register this value with a remote (master) interner; returns its marker.
  fn intern(
    self: Arc<Self>,
    req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
  ) -> Self::Marker;
  /// Select this type's bimap within [TypedInterners].
  fn bimap(interner: &mut TypedInterners) -> &mut Bimap<Self>;
}
/// Borrowed or alternate forms convertible into an owned interned value.
pub trait Internable: fmt::Debug {
  type Interned: Interned;
  fn get_owned(&self) -> Arc<Self::Interned>;
}
/// Wire-format id of an interned value.
pub trait InternMarker: Copy + PartialEq + Eq + PartialOrd + Ord + hash::Hash + Sized {
  type Interned: Interned<Marker = Self>;
  /// Fetch the value behind this marker from a remote interner.
  fn resolve(
    self,
    req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
  ) -> Tok<Self::Interned>;
  fn get_id(self) -> NonZeroU64;
  fn from_id(id: NonZeroU64) -> Self;
}
// String interning: markers are api::TStr ids.
impl Interned for String {
  type Marker = api::TStr;
  fn intern(
    self: Arc<Self>,
    req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
  ) -> Self::Marker {
    req.request(api::InternStr(self))
  }
  fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.strings }
}
impl InternMarker for api::TStr {
  type Interned = String;
  fn resolve(
    self,
    req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
  ) -> Tok<Self::Interned> {
    Tok::new(req.request(api::ExternStr(self)), self)
  }
  fn get_id(self) -> NonZeroU64 { self.0 }
  fn from_id(id: NonZeroU64) -> Self { Self(id) }
}
// Both str and String intern as String.
impl Internable for str {
  type Interned = String;
  fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_string()) }
}
impl Internable for String {
  type Interned = String;
  fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_string()) }
}
// Name-path interning: vectors of already-interned strings, identified by
// api::TStrv markers. Only markers cross the wire; elements are interned
// individually on each side.
impl Interned for Vec<Tok<String>> {
  type Marker = api::TStrv;
  fn intern(
    self: Arc<Self>,
    req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
  ) -> Self::Marker {
    req.request(api::InternStrv(Arc::new(self.iter().map(|t| t.to_api()).collect())))
  }
  fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.vecs }
}
impl InternMarker for api::TStrv {
  type Interned = Vec<Tok<String>>;
  fn resolve(
    self,
    req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
  ) -> Tok<Self::Interned> {
    // Resolve the element markers locally before wrapping the vector.
    let data =
      Arc::new(req.request(api::ExternStrv(self)).iter().map(|m| deintern(*m)).collect_vec());
    Tok::new(data, self)
  }
  fn get_id(self) -> NonZeroU64 { self.0 }
  fn from_id(id: NonZeroU64) -> Self { Self(id) }
}
// All borrowed forms below intern to Vec<Tok<String>>; marker slices are
// resolved element-wise first.
impl Internable for [Tok<String>] {
  type Interned = Vec<Tok<String>>;
  fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_vec()) }
}
impl Internable for Vec<Tok<String>> {
  type Interned = Vec<Tok<String>>;
  fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_vec()) }
}
impl Internable for Vec<api::TStr> {
  type Interned = Vec<Tok<String>>;
  fn get_owned(&self) -> Arc<Self::Interned> {
    Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
  }
}
impl Internable for [api::TStr] {
  type Interned = Vec<Tok<String>>;
  fn get_owned(&self) -> Arc<Self::Interned> {
    Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
  }
}
/// The number of references held to any token by the interner. /// The number of references held to any token by the interner.
@@ -168,138 +169,138 @@ const BASE_RC: usize = 3;
/// BASE_RC must equal the number of references the interner itself holds,
/// so that an externally held token shows exactly BASE_RC + 1.
#[test]
fn base_rc_correct() {
  let token = Tok::new(Arc::new("foo".to_string()), api::TStr(1.try_into().unwrap()));
  let mut map = Bimap::default();
  map.insert(token.clone());
  let count = Arc::strong_count(&token.data);
  assert_eq!(count, BASE_RC + 1, "the bimap plus the current instance");
}
/// Two-way map between interned values and their markers.
pub struct Bimap<T: Interned> {
  intern: HashMap<Arc<T>, Tok<T>>,
  by_id: HashMap<T::Marker, Tok<T>>,
}
impl<T: Interned> Bimap<T> {
  /// Record a token in both directions.
  pub fn insert(&mut self, token: Tok<T>) {
    self.intern.insert(token.data.clone(), token.clone());
    self.by_id.insert(token.to_api(), token);
  }
  /// Look up by marker id.
  pub fn by_marker(&self, marker: T::Marker) -> Option<Tok<T>> { self.by_id.get(&marker).cloned() }
  /// Look up by borrowed value without building an owned key.
  pub fn by_value<Q: Eq + hash::Hash>(&self, q: &Q) -> Option<Tok<T>>
  where T: Borrow<Q> {
    // raw_entry lets us probe with a hash computed from the borrowed form.
    (self.intern.raw_entry())
      .from_hash(self.intern.hasher().hash_one(q), |k| k.as_ref().borrow() == q)
      .map(|p| p.1.clone())
  }
  /// Drop tokens whose only remaining references are the interner's own
  /// (strong count == BASE_RC) and return their markers for the master.
  pub fn sweep_replica(&mut self) -> Vec<T::Marker> {
    (self.intern)
      .extract_if(|k, _| Arc::strong_count(k) == BASE_RC)
      .map(|(_, v)| {
        self.by_id.remove(&v.to_api());
        v.to_api()
      })
      .collect()
  }
  /// Keep tokens that are externally referenced or explicitly retained by
  /// some replica.
  // NOTE(review): unlike sweep_replica, this does not remove the matching
  // by_id entries, so swept values remain reachable by marker — confirm
  // whether that asymmetry is intentional.
  pub fn sweep_master(&mut self, retained: HashSet<T::Marker>) {
    self.intern.retain(|k, v| BASE_RC < Arc::strong_count(k) || retained.contains(&v.to_api()))
  }
}
impl<T: Interned> Default for Bimap<T> {
  fn default() -> Self { Self { by_id: HashMap::new(), intern: HashMap::new() } }
}
/// Transport capable of sending a request upstream and returning its response.
pub trait UpComm {
  fn up<R: Request>(&self, req: R) -> R::Response;
}
/// One [Bimap] per interned type.
#[derive(Default)]
pub struct TypedInterners {
  strings: Bimap<String>,
  vecs: Bimap<Vec<Tok<String>>>,
}
/// Global interner state: the typed maps plus, on replicas, a channel to the
/// master interner.
#[derive(Default)]
pub struct Interner {
  interners: TypedInterners,
  // None on the master, Some(channel to master) on replicas.
  master: Option<Box<dyn DynRequester<Transfer = api::IntReq>>>,
}
// Next id to assign when acting as master; starts at 1 to fit NonZeroU64.
static ID: atomic::AtomicU64 = atomic::AtomicU64::new(1);
// Lazily initialized global interner instance.
static INTERNER: Mutex<Option<Interner>> = Mutex::new(None);
pub fn interner() -> impl DerefMut<Target = Interner> { pub fn interner() -> impl DerefMut<Target = Interner> {
struct G(MutexGuard<'static, Option<Interner>>); struct G(MutexGuard<'static, Option<Interner>>);
impl Deref for G { impl Deref for G {
type Target = Interner; type Target = Interner;
fn deref(&self) -> &Self::Target { self.0.as_ref().expect("Guard pre-initialized") } fn deref(&self) -> &Self::Target { self.0.as_ref().expect("Guard pre-initialized") }
} }
impl DerefMut for G { impl DerefMut for G {
fn deref_mut(&mut self) -> &mut Self::Target { fn deref_mut(&mut self) -> &mut Self::Target {
self.0.as_mut().expect("Guard pre-iniitialized") self.0.as_mut().expect("Guard pre-iniitialized")
} }
} }
let mut g = INTERNER.lock().unwrap(); let mut g = INTERNER.lock().unwrap();
g.get_or_insert_with(Interner::default); g.get_or_insert_with(Interner::default);
G(g) G(g)
} }
pub fn init_replica(req: impl DynRequester<Transfer = api::IntReq> + 'static) { pub fn init_replica(req: impl DynRequester<Transfer = api::IntReq> + 'static) {
let mut g = INTERNER.lock().unwrap(); let mut g = INTERNER.lock().unwrap();
assert!(g.is_none(), "Attempted to initialize replica interner after first use"); assert!(g.is_none(), "Attempted to initialize replica interner after first use");
*g = Some(Interner { *g = Some(Interner {
master: Some(Box::new(req)), master: Some(Box::new(req)),
interners: TypedInterners { strings: Bimap::default(), vecs: Bimap::default() }, interners: TypedInterners { strings: Bimap::default(), vecs: Bimap::default() },
}) })
} }
/// Intern a value and return its token. Hits the local bimap first; on a
/// miss, a replica asks the master for the marker while a master allocates
/// a fresh id.
pub fn intern<T: Interned>(t: &(impl Internable<Interned = T> + ?Sized)) -> Tok<T> {
  let data = t.get_owned();
  let mut g = interner();
  // NOTE(review): unconditional eprintln tracing (and the format! feeding
  // it) runs on every intern call — likely debug output to gate or remove.
  let job = format!("{t:?} in {}", if g.master.is_some() { "replica" } else { "master" });
  eprintln!("Interning {job}");
  let typed = T::bimap(&mut g.interners);
  if let Some(tok) = typed.by_value(&data) {
    return tok;
  }
  let marker = match &mut g.master {
    Some(c) => data.clone().intern(&**c),
    None =>
      T::Marker::from_id(NonZeroU64::new(ID.fetch_add(1, atomic::Ordering::Relaxed)).unwrap()),
  };
  let tok = Tok::new(data, marker);
  T::bimap(&mut g.interners).insert(tok.clone());
  // Release the global interner lock before the trailing log line.
  mem::drop(g);
  eprintln!("Interned {job}");
  tok
}
/// Resolve a marker to its token. Replicas fetch unknown markers from the
/// master and cache them; on the master every marker must already be local.
fn deintern<M: InternMarker>(marker: M) -> Tok<M::Interned> {
  let mut g = interner();
  if let Some(tok) = M::Interned::bimap(&mut g.interners).by_marker(marker) {
    return tok;
  }
  // Panics on the master: ids originate here, so a miss is a logic error.
  let master = g.master.as_mut().expect("ID not in local interner and this is master");
  let token = marker.resolve(&**master);
  M::Interned::bimap(&mut g.interners).insert(token.clone());
  token
}
/// Union the markers of `from` into `into`, deduplicating both kinds.
pub fn merge_retained(into: &mut api::Retained, from: &api::Retained) {
  let merged_strings = into.strings.iter().chain(&from.strings).copied().unique().collect();
  into.strings = merged_strings;
  let merged_vecs = into.vecs.iter().chain(&from.vecs).copied().unique().collect();
  into.vecs = merged_vecs;
}
/// Garbage-collect this replica's interner and return the markers still in
/// use locally, so the master keeps them alive. Panics on the master.
pub fn sweep_replica() -> api::Retained {
  let mut g = interner();
  assert!(g.master.is_some(), "Not a replica");
  let strings = g.interners.strings.sweep_replica();
  let vecs = g.interners.vecs.sweep_replica();
  api::Retained { strings, vecs }
}
/// Create a thread-local token instance and copy it. This ensures that the /// Create a thread-local token instance and copy it. This ensures that the
@@ -308,47 +309,47 @@ pub fn sweep_replica() -> api::Retained {
/// expression (i.e. a literal). /// expression (i.e. a literal).
#[macro_export]
macro_rules! intern {
  ($ty:ty : $expr:expr) => {{
    // One token per thread per call site: the first use pays the interning
    // cost, every later use just clones the cached Tok.
    thread_local! {
      static VALUE: $crate::interner::Tok<<$ty as $crate::interner::Internable>::Interned>
        = $crate::interner::intern::<
          <$ty as $crate::interner::Internable>::Interned
        >($expr as &$ty);
    }
    VALUE.with(|v| v.clone())
  }};
}
/// On the master, drop interned values that are neither referenced locally
/// nor retained by any replica. Panics on replicas.
pub fn sweep_master(retained: api::Retained) {
  let mut g = interner();
  assert!(g.master.is_none(), "Not master");
  let api::Retained { strings, vecs } = retained;
  g.interners.strings.sweep_master(strings.into_iter().collect());
  g.interners.vecs.sweep_master(vecs.into_iter().collect());
}
#[cfg(test)]
mod test {
  use std::num::NonZero;

  use orchid_api_traits::{Decode, enc_vec};

  use super::*;
  use crate::api;

  /// Smoke test: interning strings and vectors of tokens works end to end.
  #[test]
  fn test_i() {
    let _: Tok<String> = intern!(str: "foo");
    let _: Tok<Vec<Tok<String>>> = intern!([Tok<String>]: &[
      intern!(str: "bar"),
      intern!(str: "baz")
    ]);
  }

  /// A marker must decode from exactly the bytes it encodes to.
  #[test]
  fn test_coding() {
    let coded = api::TStr(NonZero::new(3u64).unwrap());
    let mut enc = &enc_vec(&coded)[..];
    api::TStr::decode(&mut enc);
    assert_eq!(enc, [], "Did not consume all of {enc:?}")
  }
}

View File

@@ -8,38 +8,38 @@ use never::Never;
/// Combine two hashmaps via an infallible value merger. See also /// Combine two hashmaps via an infallible value merger. See also
/// [try_join_maps] /// [try_join_maps]
pub fn join_maps<K: Eq + Hash, V>( pub fn join_maps<K: Eq + Hash, V>(
left: HashMap<K, V>, left: HashMap<K, V>,
right: HashMap<K, V>, right: HashMap<K, V>,
mut merge: impl FnMut(&K, V, V) -> V, mut merge: impl FnMut(&K, V, V) -> V,
) -> HashMap<K, V> { ) -> HashMap<K, V> {
let (val, ev) = try_join_maps::<K, V, Never>(left, right, |k, l, r| Ok(merge(k, l, r))); let (val, ev) = try_join_maps::<K, V, Never>(left, right, |k, l, r| Ok(merge(k, l, r)));
if let Some(e) = ev.first() { if let Some(e) = ev.first() {
match *e {} match *e {}
} }
val val
} }
/// Combine two hashmaps via a fallible value merger. See also [join_maps] /// Combine two hashmaps via a fallible value merger. See also [join_maps]
pub fn try_join_maps<K: Eq + Hash, V, E>( pub fn try_join_maps<K: Eq + Hash, V, E>(
left: HashMap<K, V>, left: HashMap<K, V>,
mut right: HashMap<K, V>, mut right: HashMap<K, V>,
mut merge: impl FnMut(&K, V, V) -> Result<V, E>, mut merge: impl FnMut(&K, V, V) -> Result<V, E>,
) -> (HashMap<K, V>, Vec<E>) { ) -> (HashMap<K, V>, Vec<E>) {
let mut mixed = HashMap::with_capacity(left.len() + right.len()); let mut mixed = HashMap::with_capacity(left.len() + right.len());
let mut errors = Vec::new(); let mut errors = Vec::new();
for (key, lval) in left { for (key, lval) in left {
let val = match right.remove(&key) { let val = match right.remove(&key) {
None => lval, None => lval,
Some(rval) => match merge(&key, lval, rval) { Some(rval) => match merge(&key, lval, rval) {
Ok(v) => v, Ok(v) => v,
Err(e) => { Err(e) => {
errors.push(e); errors.push(e);
continue; continue;
}, },
}, },
}; };
mixed.insert(key, val); mixed.insert(key, val);
} }
mixed.extend(right); mixed.extend(right);
(mixed, errors) (mixed, errors)
} }

View File

@@ -1,18 +1,20 @@
use orchid_api as api; use orchid_api as api;
pub mod box_cow;
pub mod boxed_iter; pub mod boxed_iter;
pub mod char_filter;
pub mod clone; pub mod clone;
pub mod combine; pub mod combine;
pub mod event;
pub mod msg;
pub mod box_cow;
pub mod char_filter;
pub mod error; pub mod error;
pub mod event;
pub mod id_store; pub mod id_store;
pub mod interner; pub mod interner;
pub mod join; pub mod join;
pub mod location; pub mod location;
pub mod logging; pub mod logging;
pub mod macros;
mod match_mapping;
pub mod msg;
pub mod name; pub mod name;
pub mod number; pub mod number;
pub mod parse; pub mod parse;
@@ -22,5 +24,3 @@ pub mod sequence;
pub mod side; pub mod side;
pub mod tokens; pub mod tokens;
pub mod tree; pub mod tree;
pub mod macros;
mod match_mapping;

View File

@@ -1,148 +1,142 @@
//! Structures that show where code or semantic elements came from //! Structures that show where code or semantic elements came from
use crate::match_mapping;
use std::fmt; use std::fmt;
use std::hash::Hash; use std::hash::Hash;
use std::ops::Range; use std::ops::Range;
use trait_set::trait_set; use trait_set::trait_set;
use crate::interner::{intern, Tok}; use crate::interner::{Tok, intern};
use crate::name::Sym; use crate::name::Sym;
use crate::{api, intern, sym}; use crate::{api, intern, match_mapping, sym};
trait_set! { trait_set! {
pub trait GetSrc = FnMut(&Sym) -> Tok<String>; pub trait GetSrc = FnMut(&Sym) -> Tok<String>;
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Pos { pub enum Pos {
None, None,
SlotTarget, SlotTarget,
/// Used in functions to denote the generated code that carries on the /// Used in functions to denote the generated code that carries on the
/// location of the call. Not allowed in the const tree. /// location of the call. Not allowed in the const tree.
Inherit, Inherit,
Gen(CodeGenInfo), Gen(CodeGenInfo),
/// Range and file /// Range and file
SourceRange(SourceRange), SourceRange(SourceRange),
/// Range only, file implied. Most notably used by parsers /// Range only, file implied. Most notably used by parsers
Range(Range<u32>), Range(Range<u32>),
} }
impl Pos { impl Pos {
pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String { pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String {
match self { match self {
Self::Gen(g) => g.to_string(), Self::Gen(g) => g.to_string(),
Self::SourceRange(sr) => sr.pretty_print(&get_src(&sr.path)), Self::SourceRange(sr) => sr.pretty_print(&get_src(&sr.path)),
// Can't pretty print partial and meta-location // Can't pretty print partial and meta-location
other => format!("{other:?}"), other => format!("{other:?}"),
} }
} }
pub fn from_api(api: &api::Location) -> Self { pub fn from_api(api: &api::Location) -> Self {
match_mapping!(api, api::Location => Pos { match_mapping!(api, api::Location => Pos {
None, Inherit, SlotTarget, None, Inherit, SlotTarget,
Range(r.clone()), Range(r.clone()),
Gen(cgi => CodeGenInfo::from_api(cgi)), Gen(cgi => CodeGenInfo::from_api(cgi)),
SourceRange(sr => SourceRange::from_api(sr)) SourceRange(sr => SourceRange::from_api(sr))
}) })
} }
pub fn to_api(&self) -> api::Location { pub fn to_api(&self) -> api::Location {
match_mapping!(self, Pos => api::Location { match_mapping!(self, Pos => api::Location {
None, Inherit, SlotTarget, None, Inherit, SlotTarget,
Range(r.clone()), Range(r.clone()),
Gen(cgi.to_api()), Gen(cgi.to_api()),
SourceRange(sr.to_api()), SourceRange(sr.to_api()),
}) })
} }
} }
/// Exact source code location. Includes where the code was loaded from, what /// Exact source code location. Includes where the code was loaded from, what
/// the original source code was, and a byte range. /// the original source code was, and a byte range.
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SourceRange { pub struct SourceRange {
pub(crate) path: Sym, pub(crate) path: Sym,
pub(crate) range: Range<u32>, pub(crate) range: Range<u32>,
} }
impl SourceRange { impl SourceRange {
pub fn new(range: &Range<u32>, path: &Sym) -> Self { pub fn new(range: &Range<u32>, path: &Sym) -> Self {
Self { range: range.clone(), path: path.clone() } Self { range: range.clone(), path: path.clone() }
} }
/// Create a dud [SourceRange] for testing. Its value is unspecified and /// Create a dud [SourceRange] for testing. Its value is unspecified and
/// volatile. /// volatile.
pub fn mock() -> Self { Self { range: 0..1, path: sym!(test) } } pub fn mock() -> Self { Self { range: 0..1, path: sym!(test) } }
/// Path the source text was loaded from /// Path the source text was loaded from
pub fn path(&self) -> Sym { self.path.clone() } pub fn path(&self) -> Sym { self.path.clone() }
/// Byte range /// Byte range
pub fn range(&self) -> Range<u32> { self.range.clone() } pub fn range(&self) -> Range<u32> { self.range.clone() }
/// 0-based index of first byte /// 0-based index of first byte
pub fn start(&self) -> u32 { self.range.start } pub fn start(&self) -> u32 { self.range.start }
/// 0-based index of last byte + 1 /// 0-based index of last byte + 1
pub fn end(&self) -> u32 { self.range.end } pub fn end(&self) -> u32 { self.range.end }
/// Syntactic location /// Syntactic location
pub fn pos(&self) -> Pos { Pos::SourceRange(self.clone()) } pub fn pos(&self) -> Pos { Pos::SourceRange(self.clone()) }
/// Transform the numeric byte range /// Transform the numeric byte range
pub fn map_range(&self, map: impl FnOnce(Range<u32>) -> Range<u32>) -> Self { pub fn map_range(&self, map: impl FnOnce(Range<u32>) -> Range<u32>) -> Self {
Self { range: map(self.range()), path: self.path() } Self { range: map(self.range()), path: self.path() }
} }
pub fn pretty_print(&self, src: &str) -> String { pub fn pretty_print(&self, src: &str) -> String {
let (sl, sc) = pos2lc(src, self.range.start); let (sl, sc) = pos2lc(src, self.range.start);
let (el, ec) = pos2lc(src, self.range.end); let (el, ec) = pos2lc(src, self.range.end);
match (el == sl, ec <= sc + 1) { match (el == sl, ec <= sc + 1) {
(true, true) => format!("{sl}:{sc}"), (true, true) => format!("{sl}:{sc}"),
(true, false) => format!("{sl}:{sc}..{ec}"), (true, false) => format!("{sl}:{sc}..{ec}"),
(false, _) => format!("{sl}:{sc}..{el}:{ec}"), (false, _) => format!("{sl}:{sc}..{el}:{ec}"),
} }
} }
pub fn zw(path: Sym, pos: u32) -> Self { pub fn zw(path: Sym, pos: u32) -> Self { Self { path, range: pos..pos } }
Self { path, range: pos..pos } fn from_api(api: &api::SourceRange) -> Self {
} Self { path: Sym::from_api(api.path), range: api.range.clone() }
fn from_api(api: &api::SourceRange) -> Self { }
Self { path: Sym::from_api(api.path), range: api.range.clone() } fn to_api(&self) -> api::SourceRange {
} api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
fn to_api(&self) -> api::SourceRange { }
api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
}
} }
/// Information about a code generator attached to the generated code /// Information about a code generator attached to the generated code
#[derive(Clone, PartialEq, Eq, Hash)] #[derive(Clone, PartialEq, Eq, Hash)]
pub struct CodeGenInfo { pub struct CodeGenInfo {
/// formatted like a Rust namespace /// formatted like a Rust namespace
pub generator: Sym, pub generator: Sym,
/// Unformatted user message with relevant circumstances and parameters /// Unformatted user message with relevant circumstances and parameters
pub details: Tok<String>, pub details: Tok<String>,
} }
impl CodeGenInfo { impl CodeGenInfo {
/// A codegen marker with no user message and parameters /// A codegen marker with no user message and parameters
pub fn no_details(generator: Sym) -> Self { Self { generator, details: intern!(str: "") } } pub fn no_details(generator: Sym) -> Self { Self { generator, details: intern!(str: "") } }
/// A codegen marker with a user message or parameters /// A codegen marker with a user message or parameters
pub fn details(generator: Sym, details: impl AsRef<str>) -> Self { pub fn details(generator: Sym, details: impl AsRef<str>) -> Self {
Self { generator, details: intern(details.as_ref()) } Self { generator, details: intern(details.as_ref()) }
} }
/// Syntactic location /// Syntactic location
pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) } pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) }
fn from_api(api: &api::CodeGenInfo) -> Self { fn from_api(api: &api::CodeGenInfo) -> Self {
Self { Self { generator: Sym::from_api(api.generator), details: Tok::from_api(api.details) }
generator: Sym::from_api(api.generator), }
details: Tok::from_api(api.details), fn to_api(&self) -> api::CodeGenInfo {
} api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() }
} }
fn to_api(&self) -> api::CodeGenInfo {
api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() }
}
} }
impl fmt::Debug for CodeGenInfo { impl fmt::Debug for CodeGenInfo {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "CodeGenInfo({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "CodeGenInfo({self})") }
} }
impl fmt::Display for CodeGenInfo { impl fmt::Display for CodeGenInfo {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "generated by {}", self.generator)?; write!(f, "generated by {}", self.generator)?;
if !self.details.is_empty() { write!(f, ", details: {}", self.details) } else { write!(f, ".") } if !self.details.is_empty() { write!(f, ", details: {}", self.details) } else { write!(f, ".") }
} }
} }
#[must_use] #[must_use]
fn pos2lc(s: &str, i: u32) -> (u32, u32) { fn pos2lc(s: &str, i: u32) -> (u32, u32) {
s.chars() s.chars()
.take(i.try_into().unwrap()) .take(i.try_into().unwrap())
.fold((1, 1), |(line, col), char| if char == '\n' { (line + 1, 1) } else { (line, col + 1) }) .fold((1, 1), |(line, col), char| if char == '\n' { (line + 1, 1) } else { (line, col + 1) })
} }

View File

@@ -1,6 +1,6 @@
use std::fmt::Arguments; use std::fmt::Arguments;
use std::fs::File; use std::fs::File;
use std::io::{stderr, Write}; use std::io::{Write, stderr};
pub use api::LogStrategy; pub use api::LogStrategy;
use itertools::Itertools; use itertools::Itertools;
@@ -10,21 +10,21 @@ use crate::api;
#[derive(Clone)] #[derive(Clone)]
pub struct Logger(api::LogStrategy); pub struct Logger(api::LogStrategy);
impl Logger { impl Logger {
pub fn new(strat: api::LogStrategy) -> Self { Self(strat) } pub fn new(strat: api::LogStrategy) -> Self { Self(strat) }
pub fn log(&self, msg: impl AsRef<str>) { writeln!(self, "{}", msg.as_ref()) } pub fn log(&self, msg: impl AsRef<str>) { writeln!(self, "{}", msg.as_ref()) }
pub fn strat(&self) -> api::LogStrategy { self.0.clone() } pub fn strat(&self) -> api::LogStrategy { self.0.clone() }
pub fn log_buf(&self, event: impl AsRef<str>, buf: &[u8]) { pub fn log_buf(&self, event: impl AsRef<str>, buf: &[u8]) {
if std::env::var("ORCHID_LOG_BUFFERS").is_ok_and(|v| !v.is_empty()) { if std::env::var("ORCHID_LOG_BUFFERS").is_ok_and(|v| !v.is_empty()) {
writeln!(self, "{}: [{}]", event.as_ref(), buf.iter().map(|b| format!("{b:02x}")).join(" ")) writeln!(self, "{}: [{}]", event.as_ref(), buf.iter().map(|b| format!("{b:02x}")).join(" "))
} }
} }
pub fn write_fmt(&self, fmt: Arguments) { pub fn write_fmt(&self, fmt: Arguments) {
match &self.0 { match &self.0 {
api::LogStrategy::StdErr => stderr().write_fmt(fmt).expect("Could not write to stderr!"), api::LogStrategy::StdErr => stderr().write_fmt(fmt).expect("Could not write to stderr!"),
api::LogStrategy::File(f) => { api::LogStrategy::File(f) => {
let mut file = File::open(f).expect("Could not open logfile"); let mut file = File::open(f).expect("Could not open logfile");
file.write_fmt(fmt).expect("Could not write to logfile"); file.write_fmt(fmt).expect("Could not write to logfile");
}, },
} }
} }
} }

View File

@@ -13,84 +13,84 @@ use crate::{api, match_mapping};
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct MacroSlot<'a>(api::MacroTreeId, PhantomData<&'a ()>); pub struct MacroSlot<'a>(api::MacroTreeId, PhantomData<&'a ()>);
impl MacroSlot<'_> { impl MacroSlot<'_> {
pub fn id(self) -> api::MacroTreeId { self.0 } pub fn id(self) -> api::MacroTreeId { self.0 }
} }
trait_set! { trait_set! {
pub trait MacroAtomToApi<A> = FnMut(&A) -> api::MacroToken; pub trait MacroAtomToApi<A> = FnMut(&A) -> api::MacroToken;
pub trait MacroAtomFromApi<'a, A> = FnMut(&api::Atom) -> MTok<'a, A>; pub trait MacroAtomFromApi<'a, A> = FnMut(&api::Atom) -> MTok<'a, A>;
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct MTree<'a, A> { pub struct MTree<'a, A> {
pub pos: Pos, pub pos: Pos,
pub tok: Arc<MTok<'a, A>>, pub tok: Arc<MTok<'a, A>>,
} }
impl<'a, A> MTree<'a, A> { impl<'a, A> MTree<'a, A> {
pub(crate) fn from_api(api: &api::MacroTree, do_atom: &mut impl MacroAtomFromApi<'a, A>) -> Self { pub(crate) fn from_api(api: &api::MacroTree, do_atom: &mut impl MacroAtomFromApi<'a, A>) -> Self {
Self { pos: Pos::from_api(&api.location), tok: Arc::new(MTok::from_api(&api.token, do_atom)) } Self { pos: Pos::from_api(&api.location), tok: Arc::new(MTok::from_api(&api.token, do_atom)) }
} }
pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroTree { pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroTree {
api::MacroTree { location: self.pos.to_api(), token: self.tok.to_api(do_atom) } api::MacroTree { location: self.pos.to_api(), token: self.tok.to_api(do_atom) }
} }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum MTok<'a, A> { pub enum MTok<'a, A> {
S(Paren, Vec<MTree<'a, A>>), S(Paren, Vec<MTree<'a, A>>),
Name(Sym), Name(Sym),
Slot(MacroSlot<'a>), Slot(MacroSlot<'a>),
Lambda(Vec<MTree<'a, A>>, Vec<MTree<'a, A>>), Lambda(Vec<MTree<'a, A>>, Vec<MTree<'a, A>>),
Ph(Ph), Ph(Ph),
Atom(A), Atom(A),
/// Used in extensions to directly return input /// Used in extensions to directly return input
Ref(Arc<MTok<'a, Never>>), Ref(Arc<MTok<'a, Never>>),
/// Used in the matcher to skip previous macro output which can only go in /// Used in the matcher to skip previous macro output which can only go in
/// vectorial placeholders /// vectorial placeholders
Done(Arc<MTok<'a, A>>), Done(Arc<MTok<'a, A>>),
} }
impl<'a, A> MTok<'a, A> { impl<'a, A> MTok<'a, A> {
pub(crate) fn from_api( pub(crate) fn from_api(
api: &api::MacroToken, api: &api::MacroToken,
do_atom: &mut impl MacroAtomFromApi<'a, A>, do_atom: &mut impl MacroAtomFromApi<'a, A>,
) -> Self { ) -> Self {
match_mapping!(&api, api::MacroToken => MTok::<'a, A> { match_mapping!(&api, api::MacroToken => MTok::<'a, A> {
Lambda(x => mtreev_from_api(x, do_atom), b => mtreev_from_api(b, do_atom)), Lambda(x => mtreev_from_api(x, do_atom), b => mtreev_from_api(b, do_atom)),
Name(t => Sym::from_api(*t)), Name(t => Sym::from_api(*t)),
Slot(tk => MacroSlot(*tk, PhantomData)), Slot(tk => MacroSlot(*tk, PhantomData)),
S(p.clone(), b => mtreev_from_api(b, do_atom)), S(p.clone(), b => mtreev_from_api(b, do_atom)),
Ph(ph => Ph::from_api(ph)), Ph(ph => Ph::from_api(ph)),
} { } {
api::MacroToken::Atom(a) => do_atom(a) api::MacroToken::Atom(a) => do_atom(a)
}) })
} }
pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroToken { pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroToken {
fn sink(n: &Never) -> api::MacroToken { match *n {} } fn sink(n: &Never) -> api::MacroToken { match *n {} }
match_mapping!(&self, MTok => api::MacroToken { match_mapping!(&self, MTok => api::MacroToken {
Lambda(x => mtreev_to_api(x, do_atom), b => mtreev_to_api(b, do_atom)), Lambda(x => mtreev_to_api(x, do_atom), b => mtreev_to_api(b, do_atom)),
Name(t.tok().to_api()), Name(t.tok().to_api()),
Ph(ph.to_api()), Ph(ph.to_api()),
S(p.clone(), b => mtreev_to_api(b, do_atom)), S(p.clone(), b => mtreev_to_api(b, do_atom)),
Slot(tk.0.clone()), Slot(tk.0.clone()),
} { } {
MTok::Ref(r) => r.to_api(&mut sink), MTok::Ref(r) => r.to_api(&mut sink),
MTok::Done(t) => t.to_api(do_atom), MTok::Done(t) => t.to_api(do_atom),
MTok::Atom(a) => do_atom(a), MTok::Atom(a) => do_atom(a),
}) })
} }
pub fn at(self, pos: Pos) -> MTree<'a, A> { MTree { pos, tok: Arc::new(self) } } pub fn at(self, pos: Pos) -> MTree<'a, A> { MTree { pos, tok: Arc::new(self) } }
} }
pub fn mtreev_from_api<'a, 'b, A>( pub fn mtreev_from_api<'a, 'b, A>(
api: impl IntoIterator<Item = &'b api::MacroTree>, api: impl IntoIterator<Item = &'b api::MacroTree>,
do_atom: &mut impl MacroAtomFromApi<'a, A>, do_atom: &mut impl MacroAtomFromApi<'a, A>,
) -> Vec<MTree<'a, A>> { ) -> Vec<MTree<'a, A>> {
api.into_iter().map(|api| MTree::from_api(api, do_atom)).collect_vec() api.into_iter().map(|api| MTree::from_api(api, do_atom)).collect_vec()
} }
pub fn mtreev_to_api<'a: 'b, 'b, A: 'b>( pub fn mtreev_to_api<'a: 'b, 'b, A: 'b>(
v: impl IntoIterator<Item = &'b MTree<'a, A>>, v: impl IntoIterator<Item = &'b MTree<'a, A>>,
do_atom: &mut impl MacroAtomToApi<A>, do_atom: &mut impl MacroAtomToApi<A>,
) -> Vec<api::MacroTree> { ) -> Vec<api::MacroTree> {
v.into_iter().map(|t| t.to_api(do_atom)).collect_vec() v.into_iter().map(|t| t.to_api(do_atom)).collect_vec()
} }

View File

@@ -1,6 +1,6 @@
/// A shorthand for mapping over enums with identical structure. Used for converting between /// A shorthand for mapping over enums with identical structure. Used for
/// owned enums and the corresponding API enums that only differ in the type of their /// converting between owned enums and the corresponding API enums that only
/// fields. /// differ in the type of their fields.
/// ///
/// The basic form is /// The basic form is
/// ```ignore /// ```ignore

View File

@@ -3,14 +3,14 @@ use std::io;
use orchid_api_traits::{Decode, Encode}; use orchid_api_traits::{Decode, Encode};
pub fn send_msg(write: &mut impl io::Write, msg: &[u8]) -> io::Result<()> { pub fn send_msg(write: &mut impl io::Write, msg: &[u8]) -> io::Result<()> {
u32::try_from(msg.len()).unwrap().encode(write); u32::try_from(msg.len()).unwrap().encode(write);
write.write_all(msg)?; write.write_all(msg)?;
write.flush() write.flush()
} }
pub fn recv_msg(read: &mut impl io::Read) -> io::Result<Vec<u8>> { pub fn recv_msg(read: &mut impl io::Read) -> io::Result<Vec<u8>> {
let len = u32::decode(read); let len = u32::decode(read);
let mut msg = vec![0u8; len as usize]; let mut msg = vec![0u8; len as usize];
read.read_exact(&mut msg)?; read.read_exact(&mut msg)?;
Ok(msg) Ok(msg)
} }

View File

@@ -12,11 +12,11 @@ use itertools::Itertools;
use trait_set::trait_set; use trait_set::trait_set;
use crate::api; use crate::api;
use crate::interner::{intern, InternMarker, Tok}; use crate::interner::{InternMarker, Tok, intern};
trait_set! { trait_set! {
/// Traits that all name iterators should implement /// Traits that all name iterators should implement
pub trait NameIter = Iterator<Item = Tok<String>> + DoubleEndedIterator + ExactSizeIterator; pub trait NameIter = Iterator<Item = Tok<String>> + DoubleEndedIterator + ExactSizeIterator;
} }
/// A borrowed name fragment which can be empty. See [VPath] for the owned /// A borrowed name fragment which can be empty. See [VPath] for the owned
@@ -25,129 +25,129 @@ trait_set! {
#[repr(transparent)] #[repr(transparent)]
pub struct PathSlice([Tok<String>]); pub struct PathSlice([Tok<String>]);
impl PathSlice { impl PathSlice {
/// Create a new [PathSlice] /// Create a new [PathSlice]
pub fn new(slice: &[Tok<String>]) -> &PathSlice { pub fn new(slice: &[Tok<String>]) -> &PathSlice {
// SAFETY: This is ok because PathSlice is #[repr(transparent)] // SAFETY: This is ok because PathSlice is #[repr(transparent)]
unsafe { &*(slice as *const [Tok<String>] as *const PathSlice) } unsafe { &*(slice as *const [Tok<String>] as *const PathSlice) }
} }
/// Convert to an owned name fragment /// Convert to an owned name fragment
pub fn to_vpath(&self) -> VPath { VPath(self.0.to_vec()) } pub fn to_vpath(&self) -> VPath { VPath(self.0.to_vec()) }
/// Iterate over the tokens /// Iterate over the tokens
pub fn iter(&self) -> impl NameIter + '_ { self.into_iter() } pub fn iter(&self) -> impl NameIter + '_ { self.into_iter() }
/// Iterate over the segments /// Iterate over the segments
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> { pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> {
Box::new(self.0.iter().map(|s| s.as_str())) Box::new(self.0.iter().map(|s| s.as_str()))
} }
/// Find the longest shared prefix of this name and another sequence /// Find the longest shared prefix of this name and another sequence
pub fn coprefix<'a>(&'a self, other: &PathSlice) -> &'a PathSlice { pub fn coprefix<'a>(&'a self, other: &PathSlice) -> &'a PathSlice {
&self[0..self.iter().zip(other.iter()).take_while(|(l, r)| l == r).count() as u16] &self[0..self.iter().zip(other.iter()).take_while(|(l, r)| l == r).count() as u16]
} }
/// Find the longest shared suffix of this name and another sequence /// Find the longest shared suffix of this name and another sequence
pub fn cosuffix<'a>(&'a self, other: &PathSlice) -> &'a PathSlice { pub fn cosuffix<'a>(&'a self, other: &PathSlice) -> &'a PathSlice {
&self[0..self.iter().zip(other.iter()).take_while(|(l, r)| l == r).count() as u16] &self[0..self.iter().zip(other.iter()).take_while(|(l, r)| l == r).count() as u16]
} }
/// Remove another /// Remove another
pub fn strip_prefix<'a>(&'a self, other: &PathSlice) -> Option<&'a PathSlice> { pub fn strip_prefix<'a>(&'a self, other: &PathSlice) -> Option<&'a PathSlice> {
let shared = self.coprefix(other).len(); let shared = self.coprefix(other).len();
(shared == other.len()).then_some(PathSlice::new(&self[shared..])) (shared == other.len()).then_some(PathSlice::new(&self[shared..]))
} }
/// Number of path segments /// Number of path segments
pub fn len(&self) -> u16 { self.0.len().try_into().expect("Too long name!") } pub fn len(&self) -> u16 { self.0.len().try_into().expect("Too long name!") }
pub fn get<I: NameIndex>(&self, index: I) -> Option<&I::Output> { index.get(self) } pub fn get<I: NameIndex>(&self, index: I) -> Option<&I::Output> { index.get(self) }
/// Whether there are any path segments. In other words, whether this is a /// Whether there are any path segments. In other words, whether this is a
/// valid name /// valid name
pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Obtain a reference to the held slice. With all indexing traits shadowed, /// Obtain a reference to the held slice. With all indexing traits shadowed,
/// this is better done explicitly /// this is better done explicitly
pub fn as_slice(&self) -> &[Tok<String>] { self } pub fn as_slice(&self) -> &[Tok<String>] { self }
/// Global empty path slice /// Global empty path slice
pub fn empty() -> &'static Self { PathSlice::new(&[]) } pub fn empty() -> &'static Self { PathSlice::new(&[]) }
} }
impl fmt::Debug for PathSlice { impl fmt::Debug for PathSlice {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
} }
impl fmt::Display for PathSlice { impl fmt::Display for PathSlice {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::")) write!(f, "{}", self.str_iter().join("::"))
} }
} }
impl Borrow<[Tok<String>]> for PathSlice { impl Borrow<[Tok<String>]> for PathSlice {
fn borrow(&self) -> &[Tok<String>] { &self.0 } fn borrow(&self) -> &[Tok<String>] { &self.0 }
} }
impl<'a> IntoIterator for &'a PathSlice { impl<'a> IntoIterator for &'a PathSlice {
type IntoIter = Cloned<slice::Iter<'a, Tok<String>>>; type IntoIter = Cloned<slice::Iter<'a, Tok<String>>>;
type Item = Tok<String>; type Item = Tok<String>;
fn into_iter(self) -> Self::IntoIter { self.0.iter().cloned() } fn into_iter(self) -> Self::IntoIter { self.0.iter().cloned() }
} }
pub trait NameIndex { pub trait NameIndex {
type Output: ?Sized; type Output: ?Sized;
fn get(self, name: &PathSlice) -> Option<&Self::Output>; fn get(self, name: &PathSlice) -> Option<&Self::Output>;
} }
impl<T: NameIndex> Index<T> for PathSlice { impl<T: NameIndex> Index<T> for PathSlice {
type Output = T::Output; type Output = T::Output;
fn index(&self, index: T) -> &Self::Output { index.get(self).expect("Index out of bounds") } fn index(&self, index: T) -> &Self::Output { index.get(self).expect("Index out of bounds") }
} }
mod idx_impls { mod idx_impls {
use std::ops; use std::ops;
use super::{conv_range, NameIndex, PathSlice}; use super::{NameIndex, PathSlice, conv_range};
use crate::interner::Tok; use crate::interner::Tok;
impl NameIndex for u16 { impl NameIndex for u16 {
type Output = Tok<String>; type Output = Tok<String>;
fn get(self, name: &PathSlice) -> Option<&Self::Output> { name.0.get(self as usize) } fn get(self, name: &PathSlice) -> Option<&Self::Output> { name.0.get(self as usize) }
} }
impl NameIndex for ops::RangeFull { impl NameIndex for ops::RangeFull {
type Output = PathSlice; type Output = PathSlice;
fn get(self, name: &PathSlice) -> Option<&Self::Output> { Some(name) } fn get(self, name: &PathSlice) -> Option<&Self::Output> { Some(name) }
} }
macro_rules! impl_range_index_for_pathslice { macro_rules! impl_range_index_for_pathslice {
($range:ident) => { ($range:ident) => {
impl ops::Index<ops::$range<u16>> for PathSlice { impl ops::Index<ops::$range<u16>> for PathSlice {
type Output = Self; type Output = Self;
fn index(&self, index: ops::$range<u16>) -> &Self::Output { fn index(&self, index: ops::$range<u16>) -> &Self::Output {
Self::new(&self.0[conv_range::<u16, usize>(index)]) Self::new(&self.0[conv_range::<u16, usize>(index)])
} }
} }
}; };
} }
impl_range_index_for_pathslice!(RangeFrom); impl_range_index_for_pathslice!(RangeFrom);
impl_range_index_for_pathslice!(RangeTo); impl_range_index_for_pathslice!(RangeTo);
impl_range_index_for_pathslice!(Range); impl_range_index_for_pathslice!(Range);
impl_range_index_for_pathslice!(RangeInclusive); impl_range_index_for_pathslice!(RangeInclusive);
impl_range_index_for_pathslice!(RangeToInclusive); impl_range_index_for_pathslice!(RangeToInclusive);
} }
impl Deref for PathSlice { impl Deref for PathSlice {
type Target = [Tok<String>]; type Target = [Tok<String>];
fn deref(&self) -> &Self::Target { &self.0 } fn deref(&self) -> &Self::Target { &self.0 }
} }
impl Borrow<PathSlice> for [Tok<String>] { impl Borrow<PathSlice> for [Tok<String>] {
fn borrow(&self) -> &PathSlice { PathSlice::new(self) } fn borrow(&self) -> &PathSlice { PathSlice::new(self) }
} }
impl<const N: usize> Borrow<PathSlice> for [Tok<String>; N] { impl<const N: usize> Borrow<PathSlice> for [Tok<String>; N] {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self[..]) } fn borrow(&self) -> &PathSlice { PathSlice::new(&self[..]) }
} }
impl Borrow<PathSlice> for Vec<Tok<String>> { impl Borrow<PathSlice> for Vec<Tok<String>> {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self[..]) } fn borrow(&self) -> &PathSlice { PathSlice::new(&self[..]) }
} }
pub fn conv_bound<T: Into<U> + Clone, U>(bound: Bound<&T>) -> Bound<U> { pub fn conv_bound<T: Into<U> + Clone, U>(bound: Bound<&T>) -> Bound<U> {
match bound { match bound {
Bound::Included(i) => Bound::Included(i.clone().into()), Bound::Included(i) => Bound::Included(i.clone().into()),
Bound::Excluded(i) => Bound::Excluded(i.clone().into()), Bound::Excluded(i) => Bound::Excluded(i.clone().into()),
Bound::Unbounded => Bound::Unbounded, Bound::Unbounded => Bound::Unbounded,
} }
} }
pub fn conv_range<'a, T: Into<U> + Clone + 'a, U: 'a>( pub fn conv_range<'a, T: Into<U> + Clone + 'a, U: 'a>(
range: impl RangeBounds<T>, range: impl RangeBounds<T>,
) -> (Bound<U>, Bound<U>) { ) -> (Bound<U>, Bound<U>) {
(conv_bound(range.start_bound()), conv_bound(range.end_bound())) (conv_bound(range.start_bound()), conv_bound(range.end_bound()))
} }
/// A token path which may be empty. [VName] is the non-empty, /// A token path which may be empty. [VName] is the non-empty,
@@ -155,90 +155,90 @@ pub fn conv_range<'a, T: Into<U> + Clone + 'a, U: 'a>(
#[derive(Clone, Default, Hash, PartialEq, Eq)] #[derive(Clone, Default, Hash, PartialEq, Eq)]
pub struct VPath(pub Vec<Tok<String>>); pub struct VPath(pub Vec<Tok<String>>);
impl VPath { impl VPath {
/// Collect segments into a vector /// Collect segments into a vector
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().collect()) Self(items.into_iter().collect())
} }
/// Number of path segments /// Number of path segments
pub fn len(&self) -> usize { self.0.len() } pub fn len(&self) -> usize { self.0.len() }
/// Whether there are any path segments. In other words, whether this is a /// Whether there are any path segments. In other words, whether this is a
/// valid name /// valid name
pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Prepend some tokens to the path /// Prepend some tokens to the path
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().chain(self.0).collect()) Self(items.into_iter().chain(self.0).collect())
} }
/// Append some tokens to the path /// Append some tokens to the path
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(self.0.into_iter().chain(items).collect()) Self(self.0.into_iter().chain(items).collect())
} }
/// Partition the string by `::` namespace separators /// Partition the string by `::` namespace separators
pub fn parse(s: &str) -> Self { pub fn parse(s: &str) -> Self {
Self(if s.is_empty() { vec![] } else { s.split("::").map(intern).collect() }) Self(if s.is_empty() { vec![] } else { s.split("::").map(intern).collect() })
} }
/// Walk over the segments /// Walk over the segments
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> { pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> {
Box::new(self.0.iter().map(|s| s.as_str())) Box::new(self.0.iter().map(|s| s.as_str()))
} }
/// Try to convert into non-empty version /// Try to convert into non-empty version
pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) } pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) }
/// Add a token to the path. Since now we know that it can't be empty, turn it /// Add a token to the path. Since now we know that it can't be empty, turn it
/// into a name. /// into a name.
pub fn name_with_prefix(self, name: Tok<String>) -> VName { pub fn name_with_prefix(self, name: Tok<String>) -> VName {
VName(self.into_iter().chain([name]).collect()) VName(self.into_iter().chain([name]).collect())
} }
/// Add a token to the beginning of the. Since now we know that it can't be /// Add a token to the beginning of the. Since now we know that it can't be
/// empty, turn it into a name. /// empty, turn it into a name.
pub fn name_with_suffix(self, name: Tok<String>) -> VName { pub fn name_with_suffix(self, name: Tok<String>) -> VName {
VName([name].into_iter().chain(self).collect()) VName([name].into_iter().chain(self).collect())
} }
/// Convert a fs path to a vpath /// Convert a fs path to a vpath
pub fn from_path(path: &Path) -> Option<(Self, bool)> { pub fn from_path(path: &Path) -> Option<(Self, bool)> {
let to_vpath = let to_vpath =
|p: &Path| p.iter().map(|c| c.to_str().map(intern)).collect::<Option<_>>().map(VPath); |p: &Path| p.iter().map(|c| c.to_str().map(intern)).collect::<Option<_>>().map(VPath);
match path.extension().map(|s| s.to_str()) { match path.extension().map(|s| s.to_str()) {
Some(Some("orc")) => Some((to_vpath(&path.with_extension(""))?, true)), Some(Some("orc")) => Some((to_vpath(&path.with_extension(""))?, true)),
None => Some((to_vpath(path)?, false)), None => Some((to_vpath(path)?, false)),
Some(_) => None, Some(_) => None,
} }
} }
} }
impl fmt::Debug for VPath { impl fmt::Debug for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
} }
impl fmt::Display for VPath { impl fmt::Display for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::")) write!(f, "{}", self.str_iter().join("::"))
} }
} }
impl FromIterator<Tok<String>> for VPath { impl FromIterator<Tok<String>> for VPath {
fn from_iter<T: IntoIterator<Item = Tok<String>>>(iter: T) -> Self { fn from_iter<T: IntoIterator<Item = Tok<String>>>(iter: T) -> Self {
Self(iter.into_iter().collect()) Self(iter.into_iter().collect())
} }
} }
impl IntoIterator for VPath { impl IntoIterator for VPath {
type Item = Tok<String>; type Item = Tok<String>;
type IntoIter = vec::IntoIter<Self::Item>; type IntoIter = vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
} }
impl Borrow<[Tok<String>]> for VPath { impl Borrow<[Tok<String>]> for VPath {
fn borrow(&self) -> &[Tok<String>] { self.0.borrow() } fn borrow(&self) -> &[Tok<String>] { self.0.borrow() }
} }
impl Borrow<PathSlice> for VPath { impl Borrow<PathSlice> for VPath {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) } fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
} }
impl Deref for VPath { impl Deref for VPath {
type Target = PathSlice; type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() } fn deref(&self) -> &Self::Target { self.borrow() }
} }
impl<T> Index<T> for VPath impl<T> Index<T> for VPath
where PathSlice: Index<T> where PathSlice: Index<T>
{ {
type Output = <PathSlice as Index<T>>::Output; type Output = <PathSlice as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &Borrow::<PathSlice>::borrow(self)[index] } fn index(&self, index: T) -> &Self::Output { &Borrow::<PathSlice>::borrow(self)[index] }
} }
/// A mutable representation of a namespaced identifier of at least one segment. /// A mutable representation of a namespaced identifier of at least one segment.
@@ -250,71 +250,71 @@ where PathSlice: Index<T>
#[derive(Clone, Hash, PartialEq, Eq)] #[derive(Clone, Hash, PartialEq, Eq)]
pub struct VName(Vec<Tok<String>>); pub struct VName(Vec<Tok<String>>);
impl VName { impl VName {
/// Assert that the sequence isn't empty and wrap it in [VName] to represent /// Assert that the sequence isn't empty and wrap it in [VName] to represent
/// this invariant /// this invariant
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> { pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> {
let data: Vec<_> = items.into_iter().collect(); let data: Vec<_> = items.into_iter().collect();
if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) } if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) }
} }
pub fn deintern(items: impl IntoIterator<Item = api::TStr>) -> Result<Self, EmptyNameError> { pub fn deintern(items: impl IntoIterator<Item = api::TStr>) -> Result<Self, EmptyNameError> {
Self::new(items.into_iter().map(Tok::from_api)) Self::new(items.into_iter().map(Tok::from_api))
} }
/// Unwrap the enclosed vector /// Unwrap the enclosed vector
pub fn into_vec(self) -> Vec<Tok<String>> { self.0 } pub fn into_vec(self) -> Vec<Tok<String>> { self.0 }
/// Get a reference to the enclosed vector /// Get a reference to the enclosed vector
pub fn vec(&self) -> &Vec<Tok<String>> { &self.0 } pub fn vec(&self) -> &Vec<Tok<String>> { &self.0 }
/// Mutable access to the underlying vector. To ensure correct results, this /// Mutable access to the underlying vector. To ensure correct results, this
/// must never be empty. /// must never be empty.
pub fn vec_mut(&mut self) -> &mut Vec<Tok<String>> { &mut self.0 } pub fn vec_mut(&mut self) -> &mut Vec<Tok<String>> { &mut self.0 }
/// Intern the name and return a [Sym] /// Intern the name and return a [Sym]
pub fn to_sym(&self) -> Sym { Sym(intern(&self.0[..])) } pub fn to_sym(&self) -> Sym { Sym(intern(&self.0[..])) }
/// If this name has only one segment, return it /// If this name has only one segment, return it
pub fn as_root(&self) -> Option<Tok<String>> { self.0.iter().exactly_one().ok().cloned() } pub fn as_root(&self) -> Option<Tok<String>> { self.0.iter().exactly_one().ok().cloned() }
/// Prepend the segments to this name /// Prepend the segments to this name
#[must_use = "This is a pure function"] #[must_use = "This is a pure function"]
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().chain(self.0).collect()) Self(items.into_iter().chain(self.0).collect())
} }
/// Append the segments to this name /// Append the segments to this name
#[must_use = "This is a pure function"] #[must_use = "This is a pure function"]
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(self.0.into_iter().chain(items).collect()) Self(self.0.into_iter().chain(items).collect())
} }
/// Read a `::` separated namespaced name /// Read a `::` separated namespaced name
pub fn parse(s: &str) -> Result<Self, EmptyNameError> { Self::new(VPath::parse(s)) } pub fn parse(s: &str) -> Result<Self, EmptyNameError> { Self::new(VPath::parse(s)) }
pub fn literal(s: &'static str) -> Self { Self::parse(s).expect("empty literal !?") } pub fn literal(s: &'static str) -> Self { Self::parse(s).expect("empty literal !?") }
/// Obtain an iterator over the segments of the name /// Obtain an iterator over the segments of the name
pub fn iter(&self) -> impl Iterator<Item = Tok<String>> + '_ { self.0.iter().cloned() } pub fn iter(&self) -> impl Iterator<Item = Tok<String>> + '_ { self.0.iter().cloned() }
} }
impl fmt::Debug for VName { impl fmt::Debug for VName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
} }
impl fmt::Display for VName { impl fmt::Display for VName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::")) write!(f, "{}", self.str_iter().join("::"))
} }
} }
impl IntoIterator for VName { impl IntoIterator for VName {
type Item = Tok<String>; type Item = Tok<String>;
type IntoIter = vec::IntoIter<Self::Item>; type IntoIter = vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
} }
impl<T> Index<T> for VName impl<T> Index<T> for VName
where PathSlice: Index<T> where PathSlice: Index<T>
{ {
type Output = <PathSlice as Index<T>>::Output; type Output = <PathSlice as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] } fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
} }
impl Borrow<[Tok<String>]> for VName { impl Borrow<[Tok<String>]> for VName {
fn borrow(&self) -> &[Tok<String>] { self.0.borrow() } fn borrow(&self) -> &[Tok<String>] { self.0.borrow() }
} }
impl Borrow<PathSlice> for VName { impl Borrow<PathSlice> for VName {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) } fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
} }
impl Deref for VName { impl Deref for VName {
type Target = PathSlice; type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() } fn deref(&self) -> &Self::Target { self.borrow() }
} }
/// Error produced when a non-empty name [VName] or [Sym] is constructed with an /// Error produced when a non-empty name [VName] or [Sym] is constructed with an
@@ -322,10 +322,10 @@ impl Deref for VName {
#[derive(Debug, Copy, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Copy, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct EmptyNameError; pub struct EmptyNameError;
impl TryFrom<&[Tok<String>]> for VName { impl TryFrom<&[Tok<String>]> for VName {
type Error = EmptyNameError; type Error = EmptyNameError;
fn try_from(value: &[Tok<String>]) -> Result<Self, Self::Error> { fn try_from(value: &[Tok<String>]) -> Result<Self, Self::Error> {
Self::new(value.iter().cloned()) Self::new(value.iter().cloned())
} }
} }
/// An interned representation of a namespaced identifier. /// An interned representation of a namespaced identifier.
@@ -336,94 +336,94 @@ impl TryFrom<&[Tok<String>]> for VName {
#[derive(Clone, Hash, PartialEq, Eq)] #[derive(Clone, Hash, PartialEq, Eq)]
pub struct Sym(Tok<Vec<Tok<String>>>); pub struct Sym(Tok<Vec<Tok<String>>>);
impl Sym { impl Sym {
/// Assert that the sequence isn't empty, intern it and wrap it in a [Sym] to /// Assert that the sequence isn't empty, intern it and wrap it in a [Sym] to
/// represent this invariant /// represent this invariant
pub fn new(v: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> { pub fn new(v: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> {
let items = v.into_iter().collect_vec(); let items = v.into_iter().collect_vec();
Self::from_tok(intern(&items[..])) Self::from_tok(intern(&items[..]))
} }
/// Read a `::` separated namespaced name. /// Read a `::` separated namespaced name.
pub fn parse(s: &str) -> Result<Self, EmptyNameError> { pub fn parse(s: &str) -> Result<Self, EmptyNameError> {
Ok(Sym(intern(&VName::parse(s)?.into_vec()[..]))) Ok(Sym(intern(&VName::parse(s)?.into_vec()[..])))
} }
/// Assert that a token isn't empty, and wrap it in a [Sym] /// Assert that a token isn't empty, and wrap it in a [Sym]
pub fn from_tok(t: Tok<Vec<Tok<String>>>) -> Result<Self, EmptyNameError> { pub fn from_tok(t: Tok<Vec<Tok<String>>>) -> Result<Self, EmptyNameError> {
if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) } if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) }
} }
/// Grab the interner token /// Grab the interner token
pub fn tok(&self) -> Tok<Vec<Tok<String>>> { self.0.clone() } pub fn tok(&self) -> Tok<Vec<Tok<String>>> { self.0.clone() }
/// Get a number unique to this name suitable for arbitrary ordering. /// Get a number unique to this name suitable for arbitrary ordering.
pub fn id(&self) -> NonZeroU64 { self.0.to_api().get_id() } pub fn id(&self) -> NonZeroU64 { self.0.to_api().get_id() }
/// Extern the sym for editing /// Extern the sym for editing
pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) } pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) }
pub fn from_api(marker: api::TStrv) -> Sym { pub fn from_api(marker: api::TStrv) -> Sym {
Self::from_tok(Tok::from_api(marker)).expect("Empty sequence found for serialized Sym") Self::from_tok(Tok::from_api(marker)).expect("Empty sequence found for serialized Sym")
} }
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() } pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
} }
impl fmt::Debug for Sym { impl fmt::Debug for Sym {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Sym({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Sym({self})") }
} }
impl fmt::Display for Sym { impl fmt::Display for Sym {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::")) write!(f, "{}", self.str_iter().join("::"))
} }
} }
impl<T> Index<T> for Sym impl<T> Index<T> for Sym
where PathSlice: Index<T> where PathSlice: Index<T>
{ {
type Output = <PathSlice as Index<T>>::Output; type Output = <PathSlice as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] } fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
} }
impl Borrow<[Tok<String>]> for Sym { impl Borrow<[Tok<String>]> for Sym {
fn borrow(&self) -> &[Tok<String>] { &self.0[..] } fn borrow(&self) -> &[Tok<String>] { &self.0[..] }
} }
impl Borrow<PathSlice> for Sym { impl Borrow<PathSlice> for Sym {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) } fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
} }
impl Deref for Sym { impl Deref for Sym {
type Target = PathSlice; type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() } fn deref(&self) -> &Self::Target { self.borrow() }
} }
/// An abstraction over tokenized vs non-tokenized names so that they can be /// An abstraction over tokenized vs non-tokenized names so that they can be
/// handled together in datastructures. The names can never be empty /// handled together in datastructures. The names can never be empty
#[allow(clippy::len_without_is_empty)] // never empty #[allow(clippy::len_without_is_empty)] // never empty
pub trait NameLike: pub trait NameLike:
'static + Clone + Eq + Hash + fmt::Debug + fmt::Display + Borrow<PathSlice> 'static + Clone + Eq + Hash + fmt::Debug + fmt::Display + Borrow<PathSlice>
{ {
/// Convert into held slice /// Convert into held slice
fn as_slice(&self) -> &[Tok<String>] { Borrow::<PathSlice>::borrow(self) } fn as_slice(&self) -> &[Tok<String>] { Borrow::<PathSlice>::borrow(self) }
/// Get iterator over tokens /// Get iterator over tokens
fn iter(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() } fn iter(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
/// Get iterator over string segments /// Get iterator over string segments
fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ { fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ {
self.as_slice().iter().map(|t| t.as_str()) self.as_slice().iter().map(|t| t.as_str())
} }
/// Fully resolve the name for printing /// Fully resolve the name for printing
#[must_use] #[must_use]
fn to_strv(&self) -> Vec<String> { self.iter().map(|s| s.to_string()).collect() } fn to_strv(&self) -> Vec<String> { self.iter().map(|s| s.to_string()).collect() }
/// Format the name as an approximate filename /// Format the name as an approximate filename
fn as_src_path(&self) -> String { format!("{}.orc", self.iter().join("/")) } fn as_src_path(&self) -> String { format!("{}.orc", self.iter().join("/")) }
/// Return the number of segments in the name /// Return the number of segments in the name
fn len(&self) -> NonZeroUsize { fn len(&self) -> NonZeroUsize {
NonZeroUsize::try_from(self.iter().count()).expect("NameLike never empty") NonZeroUsize::try_from(self.iter().count()).expect("NameLike never empty")
} }
/// Like slice's `split_first` except we know that it always returns Some /// Like slice's `split_first` except we know that it always returns Some
fn split_first(&self) -> (Tok<String>, &PathSlice) { fn split_first(&self) -> (Tok<String>, &PathSlice) {
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty"); let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
(foot.clone(), PathSlice::new(torso)) (foot.clone(), PathSlice::new(torso))
} }
/// Like slice's `split_last` except we know that it always returns Some /// Like slice's `split_last` except we know that it always returns Some
fn split_last(&self) -> (Tok<String>, &PathSlice) { fn split_last(&self) -> (Tok<String>, &PathSlice) {
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty"); let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
(foot.clone(), PathSlice::new(torso)) (foot.clone(), PathSlice::new(torso))
} }
/// Get the first element /// Get the first element
fn first(&self) -> Tok<String> { self.split_first().0 } fn first(&self) -> Tok<String> { self.split_first().0 }
/// Get the last element /// Get the last element
fn last(&self) -> Tok<String> { self.split_last().0 } fn last(&self) -> Tok<String> { self.split_last().0 }
} }
impl NameLike for Sym {} impl NameLike for Sym {}
@@ -492,35 +492,35 @@ macro_rules! path_slice {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::borrow::Borrow; use std::borrow::Borrow;
use super::{PathSlice, Sym, VName}; use super::{PathSlice, Sym, VName};
use crate::interner::{intern, Tok}; use crate::interner::{Tok, intern};
use crate::name::VPath; use crate::name::VPath;
#[test] #[test]
fn recur() { fn recur() {
let myname = vname!(foo::bar); let myname = vname!(foo::bar);
let _borrowed_slice: &[Tok<String>] = myname.borrow(); let _borrowed_slice: &[Tok<String>] = myname.borrow();
let _borrowed_pathslice: &PathSlice = myname.borrow(); let _borrowed_pathslice: &PathSlice = myname.borrow();
let _deref_pathslice: &PathSlice = &myname; let _deref_pathslice: &PathSlice = &myname;
let _as_slice_out: &[Tok<String>] = myname.as_slice(); let _as_slice_out: &[Tok<String>] = myname.as_slice();
} }
#[test] #[test]
fn literals() { fn literals() {
assert_eq!( assert_eq!(
sym!(foo::bar::baz), sym!(foo::bar::baz),
Sym::new([intern("foo"), intern("bar"), intern("baz")]).unwrap() Sym::new([intern("foo"), intern("bar"), intern("baz")]).unwrap()
); );
assert_eq!( assert_eq!(
vname!(foo::bar::baz), vname!(foo::bar::baz),
VName::new([intern("foo"), intern("bar"), intern("baz")]).unwrap() VName::new([intern("foo"), intern("bar"), intern("baz")]).unwrap()
); );
assert_eq!(vpath!(foo::bar::baz), VPath::new([intern("foo"), intern("bar"), intern("baz")])); assert_eq!(vpath!(foo::bar::baz), VPath::new([intern("foo"), intern("bar"), intern("baz")]));
assert_eq!( assert_eq!(
path_slice!(foo::bar::baz), path_slice!(foo::bar::baz),
PathSlice::new(&[intern("foo"), intern("bar"), intern("baz")]) PathSlice::new(&[intern("foo"), intern("bar"), intern("baz")])
); );
} }
} }

View File

@@ -1,131 +1,131 @@
use std::num::IntErrorKind; use std::num::IntErrorKind;
use std::ops::Range; use std::ops::Range;
use num_traits::ToPrimitive;
use ordered_float::NotNan; use ordered_float::NotNan;
use rust_decimal::Decimal; use rust_decimal::Decimal;
use num_traits::ToPrimitive;
use crate::error::{mk_err, OrcErr}; use crate::error::{OrcErr, mk_err};
use crate::intern; use crate::intern;
use crate::location::Pos; use crate::location::Pos;
/// A number, either floating point or unsigned int, parsed by Orchid. /// A number, either floating point or unsigned int, parsed by Orchid.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Numeric { pub enum Numeric {
/// A nonnegative integer /// A nonnegative integer
Uint(u64), Uint(u64),
/// A binary float other than NaN /// A binary float other than NaN
Float(NotNan<f64>), Float(NotNan<f64>),
/// A decimal number /// A decimal number
Decimal(Decimal), Decimal(Decimal),
} }
impl Numeric { impl Numeric {
pub fn decimal(num: i64, scale: u32) -> Self { Self::Decimal(Decimal::new(num, scale)) } pub fn decimal(num: i64, scale: u32) -> Self { Self::Decimal(Decimal::new(num, scale)) }
pub fn float(value: f64) -> Self { Self::Float(NotNan::new(value).unwrap()) } pub fn float(value: f64) -> Self { Self::Float(NotNan::new(value).unwrap()) }
pub fn to_f64(self) -> NotNan<f64> { pub fn to_f64(self) -> NotNan<f64> {
match self { match self {
Self::Float(f) => f, Self::Float(f) => f,
Self::Decimal(d) => { Self::Decimal(d) => {
let f = d.to_f64().expect("This is apparently always possible"); let f = d.to_f64().expect("This is apparently always possible");
NotNan::new(f).expect("decimal was nan") NotNan::new(f).expect("decimal was nan")
}, },
Self::Uint(i) => NotNan::new(i as f64).expect("int cannot be NaN"), Self::Uint(i) => NotNan::new(i as f64).expect("int cannot be NaN"),
} }
} }
} }
/// Rasons why [parse_num] might fail. See [NumError]. /// Rasons why [parse_num] might fail. See [NumError].
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum NumErrorKind { pub enum NumErrorKind {
/// The literal describes [f64::NAN] /// The literal describes [f64::NAN]
NaN, NaN,
/// Some integer appearing in the literal overflows [usize] /// Some integer appearing in the literal overflows [usize]
Overflow, Overflow,
/// A character that isn't a digit in the given base was found /// A character that isn't a digit in the given base was found
InvalidDigit, InvalidDigit,
} }
impl NumErrorKind { impl NumErrorKind {
fn from_int(kind: &IntErrorKind) -> Self { fn from_int(kind: &IntErrorKind) -> Self {
match kind { match kind {
IntErrorKind::InvalidDigit => Self::InvalidDigit, IntErrorKind::InvalidDigit => Self::InvalidDigit,
IntErrorKind::NegOverflow | IntErrorKind::PosOverflow => Self::Overflow, IntErrorKind::NegOverflow | IntErrorKind::PosOverflow => Self::Overflow,
_ => panic!("Impossible error condition"), _ => panic!("Impossible error condition"),
} }
} }
} }
/// Error produced by [parse_num] /// Error produced by [parse_num]
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct NumError { pub struct NumError {
/// Location /// Location
pub range: Range<usize>, pub range: Range<usize>,
/// Reason /// Reason
pub kind: NumErrorKind, pub kind: NumErrorKind,
} }
pub fn num_to_err(NumError { kind, range }: NumError, offset: u32) -> OrcErr { pub fn num_to_err(NumError { kind, range }: NumError, offset: u32) -> OrcErr {
mk_err( mk_err(
intern!(str: "Failed to parse number"), intern!(str: "Failed to parse number"),
match kind { match kind {
NumErrorKind::NaN => "NaN emerged during parsing", NumErrorKind::NaN => "NaN emerged during parsing",
NumErrorKind::InvalidDigit => "non-digit character encountered", NumErrorKind::InvalidDigit => "non-digit character encountered",
NumErrorKind::Overflow => "The number being described is too large or too accurate", NumErrorKind::Overflow => "The number being described is too large or too accurate",
}, },
[Pos::Range(offset + range.start as u32..offset + range.end as u32).into()], [Pos::Range(offset + range.start as u32..offset + range.end as u32).into()],
) )
} }
/// Parse a numbre literal out of text /// Parse a numbre literal out of text
pub fn parse_num(string: &str) -> Result<Numeric, NumError> { pub fn parse_num(string: &str) -> Result<Numeric, NumError> {
let overflow_err = NumError { range: 0..string.len(), kind: NumErrorKind::Overflow }; let overflow_err = NumError { range: 0..string.len(), kind: NumErrorKind::Overflow };
let (radix, noprefix, pos) = (string.strip_prefix("0x").map(|s| (16u8, s, 2))) let (radix, noprefix, pos) = (string.strip_prefix("0x").map(|s| (16u8, s, 2)))
.or_else(|| string.strip_prefix("0b").map(|s| (2u8, s, 2))) .or_else(|| string.strip_prefix("0b").map(|s| (2u8, s, 2)))
.or_else(|| string.strip_prefix("0o").map(|s| (8u8, s, 2))) .or_else(|| string.strip_prefix("0o").map(|s| (8u8, s, 2)))
.unwrap_or((10u8, string, 0)); .unwrap_or((10u8, string, 0));
// identity // identity
let (base, exponent) = match noprefix.split_once('p') { let (base, exponent) = match noprefix.split_once('p') {
Some((b, e)) => { Some((b, e)) => {
let (s, d, len) = e.strip_prefix('-').map_or((1, e, 0), |ue| (-1, ue, 1)); let (s, d, len) = e.strip_prefix('-').map_or((1, e, 0), |ue| (-1, ue, 1));
(b, s * int_parse(d, 10, pos + b.len() + 1 + len)? as i32) (b, s * int_parse(d, 10, pos + b.len() + 1 + len)? as i32)
}, },
None => (noprefix, 0), None => (noprefix, 0),
}; };
match base.split_once('.') { match base.split_once('.') {
None => { None => {
let base_usize = int_parse(base, radix, pos)?; let base_usize = int_parse(base, radix, pos)?;
if let Ok(pos_exp) = u32::try_from(exponent) { if let Ok(pos_exp) = u32::try_from(exponent) {
if let Some(radical) = u64::from(radix).checked_pow(pos_exp) { if let Some(radical) = u64::from(radix).checked_pow(pos_exp) {
let number = base_usize.checked_mul(radical).ok_or(overflow_err)?; let number = base_usize.checked_mul(radical).ok_or(overflow_err)?;
return Ok(Numeric::Uint(number)); return Ok(Numeric::Uint(number));
} }
} }
let f = (base_usize as f64) * (radix as f64).powi(exponent); let f = (base_usize as f64) * (radix as f64).powi(exponent);
let err = NumError { range: 0..string.len(), kind: NumErrorKind::NaN }; let err = NumError { range: 0..string.len(), kind: NumErrorKind::NaN };
Ok(Numeric::Float(NotNan::new(f).map_err(|_| err)?)) Ok(Numeric::Float(NotNan::new(f).map_err(|_| err)?))
}, },
Some((whole, part)) => { Some((whole, part)) => {
let whole_n = int_parse(whole, radix, pos)?; let whole_n = int_parse(whole, radix, pos)?;
let part_n = int_parse(part, radix, pos + whole.len() + 1)?; let part_n = int_parse(part, radix, pos + whole.len() + 1)?;
let scale = part.chars().filter(|c| *c != '_').count() as u32; let scale = part.chars().filter(|c| *c != '_').count() as u32;
if radix == 10 { if radix == 10 {
let mut scaled_unit = Decimal::ONE; let mut scaled_unit = Decimal::ONE;
(scaled_unit.set_scale(scale)) (scaled_unit.set_scale(scale))
.map_err(|_| NumError { range: 0..string.len(), kind: NumErrorKind::Overflow })?; .map_err(|_| NumError { range: 0..string.len(), kind: NumErrorKind::Overflow })?;
Ok(Numeric::Decimal(Decimal::from(whole_n) + scaled_unit * Decimal::from(part_n))) Ok(Numeric::Decimal(Decimal::from(whole_n) + scaled_unit * Decimal::from(part_n)))
} else { } else {
let real_val = whole_n as f64 + (part_n as f64 / (radix as f64).powi(scale as i32)); let real_val = whole_n as f64 + (part_n as f64 / (radix as f64).powi(scale as i32));
let f = real_val * (radix as f64).powi(exponent); let f = real_val * (radix as f64).powi(exponent);
Ok(Numeric::Float(NotNan::new(f).expect("None of the inputs are NaN"))) Ok(Numeric::Float(NotNan::new(f).expect("None of the inputs are NaN")))
} }
}, },
} }
} }
fn int_parse(s: &str, radix: u8, start: usize) -> Result<u64, NumError> { fn int_parse(s: &str, radix: u8, start: usize) -> Result<u64, NumError> {
let s = s.chars().filter(|c| *c != '_').collect::<String>(); let s = s.chars().filter(|c| *c != '_').collect::<String>();
let range = start..(start + s.len()); let range = start..(start + s.len());
u64::from_str_radix(&s, radix as u32) u64::from_str_radix(&s, radix as u32)
.map_err(|e| NumError { range, kind: NumErrorKind::from_int(e.kind()) }) .map_err(|e| NumError { range, kind: NumErrorKind::from_int(e.kind()) })
} }
/// Filter for characters that can appear in numbers /// Filter for characters that can appear in numbers
@@ -136,42 +136,42 @@ pub fn numstart(c: char) -> bool { c.is_ascii_digit() }
/// Print a number as a base-16 floating point literal /// Print a number as a base-16 floating point literal
#[must_use] #[must_use]
pub fn print_nat16(num: NotNan<f64>) -> String { pub fn print_nat16(num: NotNan<f64>) -> String {
if *num == 0.0 { if *num == 0.0 {
return "0x0".to_string(); return "0x0".to_string();
} else if num.is_infinite() { } else if num.is_infinite() {
return match num.is_sign_positive() { return match num.is_sign_positive() {
true => "Infinity".to_string(), true => "Infinity".to_string(),
false => "-Infinity".to_string(), false => "-Infinity".to_string(),
}; };
} else if num.is_nan() { } else if num.is_nan() {
return "NaN".to_string(); return "NaN".to_string();
} }
let exp = num.log(16.0).floor(); let exp = num.log(16.0).floor();
let man = *num / 16_f64.powf(exp); let man = *num / 16_f64.powf(exp);
format!("0x{man}p{exp:.0}") format!("0x{man}p{exp:.0}")
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::{parse_num, Numeric}; use super::{Numeric, parse_num};
#[test] #[test]
fn just_ints() { fn just_ints() {
let test = |s, n| assert_eq!(parse_num(s), Ok(Numeric::Uint(n))); let test = |s, n| assert_eq!(parse_num(s), Ok(Numeric::Uint(n)));
test("12345", 12345); test("12345", 12345);
test("0xcafebabe", 0xcafebabe); test("0xcafebabe", 0xcafebabe);
test("0o751", 0o751); test("0o751", 0o751);
test("0b111000111", 0b111000111); test("0b111000111", 0b111000111);
} }
#[test] #[test]
fn decimals() { fn decimals() {
let test = |s, n| assert_eq!(parse_num(s), Ok(n)); let test = |s, n| assert_eq!(parse_num(s), Ok(n));
test("3.1417", Numeric::decimal(31417, 4)); test("3.1417", Numeric::decimal(31417, 4));
test("0xf.cafe", Numeric::float(0xf as f64 + 0xcafe as f64 / 0x10000 as f64)); test("0xf.cafe", Numeric::float(0xf as f64 + 0xcafe as f64 / 0x10000 as f64));
test("34p3", Numeric::Uint(34000)); test("34p3", Numeric::Uint(34000));
test("0x2p3", Numeric::Uint(0x2 * 0x1000)); test("0x2p3", Numeric::Uint(0x2 * 0x1000));
test("1.5p3", Numeric::decimal(1500, 0)); test("1.5p3", Numeric::decimal(1500, 0));
test("0x2.5p3", Numeric::float((0x25 * 0x100) as f64)); test("0x2.5p3", Numeric::float((0x25 * 0x100) as f64));
} }
} }

View File

@@ -3,8 +3,8 @@ use std::ops::{Deref, Range};
use itertools::Itertools; use itertools::Itertools;
use crate::error::{mk_err, mk_errv, OrcRes, Reporter}; use crate::error::{OrcRes, Reporter, mk_err, mk_errv};
use crate::interner::{intern, Tok}; use crate::interner::{Tok, intern};
use crate::location::Pos; use crate::location::Pos;
use crate::name::VPath; use crate::name::VPath;
use crate::tree::{AtomRepr, ExtraTok, Paren, TokTree, Token}; use crate::tree::{AtomRepr, ExtraTok, Paren, TokTree, Token};
@@ -17,297 +17,299 @@ pub fn unrep_space(c: char) -> bool { c.is_whitespace() && !"\r\n".contains(c) }
#[derive(Debug)] #[derive(Debug)]
pub struct Snippet<'a, 'b, A: AtomRepr, X: ExtraTok> { pub struct Snippet<'a, 'b, A: AtomRepr, X: ExtraTok> {
prev: &'a TokTree<'b, A, X>, prev: &'a TokTree<'b, A, X>,
cur: &'a [TokTree<'b, A, X>], cur: &'a [TokTree<'b, A, X>],
} }
impl<'a, 'b, A: AtomRepr, X: ExtraTok> Snippet<'a, 'b, A, X> { impl<'a, 'b, A: AtomRepr, X: ExtraTok> Snippet<'a, 'b, A, X> {
pub fn new(prev: &'a TokTree<'b, A, X>, cur: &'a [TokTree<'b, A, X>]) -> Self { pub fn new(prev: &'a TokTree<'b, A, X>, cur: &'a [TokTree<'b, A, X>]) -> Self {
Self { prev, cur } Self { prev, cur }
} }
pub fn split_at(self, pos: u32) -> (Self, Self) { pub fn split_at(self, pos: u32) -> (Self, Self) {
let fst = Self { prev: self.prev, cur: &self.cur[..pos as usize] }; let fst = Self { prev: self.prev, cur: &self.cur[..pos as usize] };
let new_prev = if pos == 0 { self.prev } else { &self.cur[pos as usize - 1] }; let new_prev = if pos == 0 { self.prev } else { &self.cur[pos as usize - 1] };
let snd = Self { prev: new_prev, cur: &self.cur[pos as usize..] }; let snd = Self { prev: new_prev, cur: &self.cur[pos as usize..] };
(fst, snd) (fst, snd)
} }
pub fn find_idx(self, mut f: impl FnMut(&Token<'b, A, X>) -> bool) -> Option<u32> { pub fn find_idx(self, mut f: impl FnMut(&Token<'b, A, X>) -> bool) -> Option<u32> {
self.cur.iter().position(|t| f(&t.tok)).map(|t| t as u32) self.cur.iter().position(|t| f(&t.tok)).map(|t| t as u32)
} }
pub fn get(self, idx: u32) -> Option<&'a TokTree<'b, A, X>> { self.cur.get(idx as usize) } pub fn get(self, idx: u32) -> Option<&'a TokTree<'b, A, X>> { self.cur.get(idx as usize) }
pub fn len(self) -> u32 { self.cur.len() as u32 } pub fn len(self) -> u32 { self.cur.len() as u32 }
pub fn prev(self) -> &'a TokTree<'b, A, X> { self.prev } pub fn prev(self) -> &'a TokTree<'b, A, X> { self.prev }
pub fn pos(self) -> Range<u32> { pub fn pos(self) -> Range<u32> {
(self.cur.first().map(|f| f.range.start..self.cur.last().unwrap().range.end)) (self.cur.first().map(|f| f.range.start..self.cur.last().unwrap().range.end))
.unwrap_or(self.prev.range.clone()) .unwrap_or(self.prev.range.clone())
} }
pub fn pop_front(self) -> Option<(&'a TokTree<'b, A, X>, Self)> { pub fn pop_front(self) -> Option<(&'a TokTree<'b, A, X>, Self)> {
self.cur.first().map(|r| (r, self.split_at(1).1)) self.cur.first().map(|r| (r, self.split_at(1).1))
} }
pub fn pop_back(self) -> Option<(Self, &'a TokTree<'b, A, X>)> { pub fn pop_back(self) -> Option<(Self, &'a TokTree<'b, A, X>)> {
self.cur.last().map(|r| (self.split_at(self.len() - 1).0, r)) self.cur.last().map(|r| (self.split_at(self.len() - 1).0, r))
} }
pub fn split_once(self, f: impl FnMut(&Token<'b, A, X>) -> bool) -> Option<(Self, Self)> { pub fn split_once(self, f: impl FnMut(&Token<'b, A, X>) -> bool) -> Option<(Self, Self)> {
let idx = self.find_idx(f)?; let idx = self.find_idx(f)?;
Some((self.split_at(idx).0, self.split_at(idx + 1).1)) Some((self.split_at(idx).0, self.split_at(idx + 1).1))
} }
pub fn split( pub fn split(
mut self, mut self,
mut f: impl FnMut(&Token<'b, A, X>) -> bool, mut f: impl FnMut(&Token<'b, A, X>) -> bool,
) -> impl Iterator<Item = Self> { ) -> impl Iterator<Item = Self> {
iter::from_fn(move || { iter::from_fn(move || {
self.is_empty().then_some(())?; self.is_empty().then_some(())?;
let (ret, next) = self.split_once(&mut f).unwrap_or(self.split_at(self.len())); let (ret, next) = self.split_once(&mut f).unwrap_or(self.split_at(self.len()));
self = next; self = next;
Some(ret) Some(ret)
}) })
} }
pub fn is_empty(self) -> bool { self.len() == 0 } pub fn is_empty(self) -> bool { self.len() == 0 }
pub fn skip_fluff(self) -> Self { pub fn skip_fluff(self) -> Self {
let non_fluff_start = self.find_idx(|t| !matches!(t, Token::NS | Token::Comment(_))); let non_fluff_start = self.find_idx(|t| !matches!(t, Token::NS | Token::Comment(_)));
self.split_at(non_fluff_start.unwrap_or(self.len())).1 self.split_at(non_fluff_start.unwrap_or(self.len())).1
} }
} }
impl<A: AtomRepr, X: ExtraTok> Copy for Snippet<'_, '_, A, X> {} impl<A: AtomRepr, X: ExtraTok> Copy for Snippet<'_, '_, A, X> {}
impl<A: AtomRepr, X: ExtraTok> Clone for Snippet<'_, '_, A, X> { impl<A: AtomRepr, X: ExtraTok> Clone for Snippet<'_, '_, A, X> {
fn clone(&self) -> Self { *self } fn clone(&self) -> Self { *self }
} }
impl<'b, A: AtomRepr, X: ExtraTok> Deref for Snippet<'_, 'b, A, X> { impl<'b, A: AtomRepr, X: ExtraTok> Deref for Snippet<'_, 'b, A, X> {
type Target = [TokTree<'b, A, X>]; type Target = [TokTree<'b, A, X>];
fn deref(&self) -> &Self::Target { self.cur } fn deref(&self) -> &Self::Target { self.cur }
} }
/// Remove tokens that aren't meaningful in expression context, such as comments /// Remove tokens that aren't meaningful in expression context, such as comments
/// or line breaks /// or line breaks
pub fn strip_fluff<'a, A: AtomRepr, X: ExtraTok>( pub fn strip_fluff<'a, A: AtomRepr, X: ExtraTok>(
tt: &TokTree<'a, A, X>, tt: &TokTree<'a, A, X>,
) -> Option<TokTree<'a, A, X>> { ) -> Option<TokTree<'a, A, X>> {
let tok = match &tt.tok { let tok = match &tt.tok {
Token::BR => return None, Token::BR => return None,
Token::Comment(_) => return None, Token::Comment(_) => return None,
Token::LambdaHead(arg) => Token::LambdaHead(arg.iter().filter_map(strip_fluff).collect()), Token::LambdaHead(arg) => Token::LambdaHead(arg.iter().filter_map(strip_fluff).collect()),
Token::S(p, b) => Token::S(*p, b.iter().filter_map(strip_fluff).collect()), Token::S(p, b) => Token::S(*p, b.iter().filter_map(strip_fluff).collect()),
t => t.clone(), t => t.clone(),
}; };
Some(TokTree { tok, range: tt.range.clone() }) Some(TokTree { tok, range: tt.range.clone() })
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Comment { pub struct Comment {
pub text: Tok<String>, pub text: Tok<String>,
pub pos: Pos, pub pos: Pos,
} }
impl Comment { impl Comment {
pub fn to_api(&self) -> api::Comment { pub fn to_api(&self) -> api::Comment {
api::Comment { location: self.pos.to_api(), text: self.text.to_api() } api::Comment { location: self.pos.to_api(), text: self.text.to_api() }
} }
pub fn from_api(api: &api::Comment) -> Self { pub fn from_api(api: &api::Comment) -> Self {
Self { pos: Pos::from_api(&api.location), text: Tok::from_api(api.text) } Self { pos: Pos::from_api(&api.location), text: Tok::from_api(api.text) }
} }
} }
pub fn line_items<'a, 'b, A: AtomRepr, X: ExtraTok>( pub fn line_items<'a, 'b, A: AtomRepr, X: ExtraTok>(
snip: Snippet<'a, 'b, A, X>, snip: Snippet<'a, 'b, A, X>,
) -> Vec<Parsed<'a, 'b, Vec<Comment>, A, X>> { ) -> Vec<Parsed<'a, 'b, Vec<Comment>, A, X>> {
let mut items = Vec::new(); let mut items = Vec::new();
let mut comments = Vec::new(); let mut comments = Vec::new();
for mut line in snip.split(|t| matches!(t, Token::BR)) { for mut line in snip.split(|t| matches!(t, Token::BR)) {
match &line.cur { match &line.cur {
[TokTree { tok: Token::S(Paren::Round, tokens), .. }] => line.cur = tokens, [TokTree { tok: Token::S(Paren::Round, tokens), .. }] => line.cur = tokens,
[] => continue, [] => continue,
_ => (), _ => (),
} }
match line.find_idx(|t| !matches!(t, Token::Comment(_))) { match line.find_idx(|t| !matches!(t, Token::Comment(_))) {
None => comments.extend(line.cur), None => comments.extend(line.cur),
Some(i) => { Some(i) => {
let (cmts, tail) = line.split_at(i); let (cmts, tail) = line.split_at(i);
let comments = Vec::from_iter(comments.drain(..).chain(cmts.cur).map(|t| match &t.tok { let comments = Vec::from_iter(comments.drain(..).chain(cmts.cur).map(|t| match &t.tok {
Token::Comment(c) => Comment { text: intern(&**c), pos: Pos::Range(t.range.clone()) }, Token::Comment(c) => Comment { text: intern(&**c), pos: Pos::Range(t.range.clone()) },
_ => unreachable!("All are comments checked above"), _ => unreachable!("All are comments checked above"),
})); }));
items.push(Parsed { output: comments, tail }); items.push(Parsed { output: comments, tail });
}, },
} }
} }
items items
} }
pub fn try_pop_no_fluff<'a, 'b, A: AtomRepr, X: ExtraTok>( pub fn try_pop_no_fluff<'a, 'b, A: AtomRepr, X: ExtraTok>(
snip: Snippet<'a, 'b, A, X>, snip: Snippet<'a, 'b, A, X>,
) -> ParseRes<'a, 'b, &'a TokTree<'b, A, X>, A, X> { ) -> ParseRes<'a, 'b, &'a TokTree<'b, A, X>, A, X> {
snip.skip_fluff().pop_front().map(|(output, tail)| Parsed { output, tail }).ok_or_else(|| { snip.skip_fluff().pop_front().map(|(output, tail)| Parsed { output, tail }).ok_or_else(|| {
mk_errv( mk_errv(
intern!(str: "Unexpected end"), intern!(str: "Unexpected end"),
"Pattern ends abruptly", "Pattern ends abruptly",
[Pos::Range(snip.pos()).into()], [Pos::Range(snip.pos()).into()],
) )
}) })
} }
pub fn expect_end(snip: Snippet<'_, '_, impl AtomRepr, impl ExtraTok>) -> OrcRes<()> { pub fn expect_end(snip: Snippet<'_, '_, impl AtomRepr, impl ExtraTok>) -> OrcRes<()> {
match snip.skip_fluff().get(0) { match snip.skip_fluff().get(0) {
Some(surplus) => Err(mk_errv( Some(surplus) => Err(mk_errv(
intern!(str: "Extra code after end of line"), intern!(str: "Extra code after end of line"),
"Code found after the end of the line", "Code found after the end of the line",
[Pos::Range(surplus.range.clone()).into()], [Pos::Range(surplus.range.clone()).into()],
)), )),
None => Ok(()), None => Ok(()),
} }
} }
pub fn expect_tok<'a, 'b, A: AtomRepr, X: ExtraTok>( pub fn expect_tok<'a, 'b, A: AtomRepr, X: ExtraTok>(
snip: Snippet<'a, 'b, A, X>, snip: Snippet<'a, 'b, A, X>,
tok: Tok<String>, tok: Tok<String>,
) -> ParseRes<'a, 'b, (), A, X> { ) -> ParseRes<'a, 'b, (), A, X> {
let Parsed { output: head, tail } = try_pop_no_fluff(snip)?; let Parsed { output: head, tail } = try_pop_no_fluff(snip)?;
match &head.tok { match &head.tok {
Token::Name(n) if *n == tok => Ok(Parsed { output: (), tail }), Token::Name(n) if *n == tok => Ok(Parsed { output: (), tail }),
t => Err(mk_errv( t => Err(mk_errv(
intern!(str: "Expected specific keyword"), intern!(str: "Expected specific keyword"),
format!("Expected {tok} but found {t}"), format!("Expected {tok} but found {t}"),
[Pos::Range(head.range.clone()).into()], [Pos::Range(head.range.clone()).into()],
)), )),
} }
} }
pub struct Parsed<'a, 'b, T, A: AtomRepr, X: ExtraTok> { pub struct Parsed<'a, 'b, T, A: AtomRepr, X: ExtraTok> {
pub output: T, pub output: T,
pub tail: Snippet<'a, 'b, A, X>, pub tail: Snippet<'a, 'b, A, X>,
} }
pub type ParseRes<'a, 'b, T, A, X> = OrcRes<Parsed<'a, 'b, T, A, X>>; pub type ParseRes<'a, 'b, T, A, X> = OrcRes<Parsed<'a, 'b, T, A, X>>;
pub fn parse_multiname<'a, 'b, A: AtomRepr, X: ExtraTok>( pub fn parse_multiname<'a, 'b, A: AtomRepr, X: ExtraTok>(
ctx: &impl Reporter, ctx: &impl Reporter,
tail: Snippet<'a, 'b, A, X>, tail: Snippet<'a, 'b, A, X>,
) -> ParseRes<'a, 'b, Vec<(Import, Pos)>, A, X> { ) -> ParseRes<'a, 'b, Vec<(Import, Pos)>, A, X> {
let ret = rec(ctx, tail); let ret = rec(ctx, tail);
#[allow(clippy::type_complexity)] // it's an internal function #[allow(clippy::type_complexity)] // it's an internal function
pub fn rec<'a, 'b, A: AtomRepr, X: ExtraTok>( pub fn rec<'a, 'b, A: AtomRepr, X: ExtraTok>(
ctx: &impl Reporter, ctx: &impl Reporter,
tail: Snippet<'a, 'b, A, X>, tail: Snippet<'a, 'b, A, X>,
) -> ParseRes<'a, 'b, Vec<(Vec<Tok<String>>, Option<Tok<String>>, Pos)>, A, X> { ) -> ParseRes<'a, 'b, Vec<(Vec<Tok<String>>, Option<Tok<String>>, Pos)>, A, X> {
let comma = intern!(str: ","); let comma = intern!(str: ",");
let globstar = intern!(str: "*"); let globstar = intern!(str: "*");
let (name, tail) = tail.skip_fluff().pop_front().ok_or_else(|| { let (name, tail) = tail.skip_fluff().pop_front().ok_or_else(|| {
mk_err(intern!(str: "Expected name"), "Expected a name, a list of names, or a globstar.", [ mk_err(intern!(str: "Expected name"), "Expected a name, a list of names, or a globstar.", [
Pos::Range(tail.pos()).into(), Pos::Range(tail.pos()).into(),
]) ])
})?; })?;
if let Some((Token::NS, tail)) = tail.skip_fluff().pop_front().map(|(tt, s)| (&tt.tok, s)) { if let Some((Token::NS, tail)) = tail.skip_fluff().pop_front().map(|(tt, s)| (&tt.tok, s)) {
let n = match &name.tok { let n = match &name.tok {
Token::Name(n) if n.starts_with(name_start) => Ok(n), Token::Name(n) if n.starts_with(name_start) => Ok(n),
_ => Err(mk_err(intern!(str: "Unexpected name prefix"), "Only names can precede ::", [ _ => Err(mk_err(intern!(str: "Unexpected name prefix"), "Only names can precede ::", [
Pos::Range(name.range.clone()).into(), Pos::Range(name.range.clone()).into(),
])), ])),
}; };
match (rec(ctx, tail), n) { match (rec(ctx, tail), n) {
(Err(ev), n) => Err(ev.extended(n.err())), (Err(ev), n) => Err(ev.extended(n.err())),
(Ok(Parsed { tail, .. }), Err(e)) => { (Ok(Parsed { tail, .. }), Err(e)) => {
ctx.report(e); ctx.report(e);
Ok(Parsed { output: vec![], tail }) Ok(Parsed { output: vec![], tail })
}, },
(Ok(Parsed { tail, output }), Ok(pre)) => Ok(Parsed { (Ok(Parsed { tail, output }), Ok(pre)) => Ok(Parsed {
output: output.into_iter().update(|i| i.0.push(pre.clone())).collect_vec(), output: output.into_iter().update(|i| i.0.push(pre.clone())).collect_vec(),
tail, tail,
}), }),
} }
} else { } else {
let output = match &name.tok { let output = match &name.tok {
Token::Name(ntok) => { Token::Name(ntok) => {
let nopt = match ntok { let nopt = match ntok {
n if *n == globstar => None, n if *n == globstar => None,
n if n.starts_with(op_char) => n if n.starts_with(op_char) => {
return Err(mk_errv( return Err(mk_errv(
intern!(str: "Unescaped operator in multiname"), intern!(str: "Unescaped operator in multiname"),
"Operators in multinames should be enclosed in []", "Operators in multinames should be enclosed in []",
[Pos::Range(name.range.clone()).into()], [Pos::Range(name.range.clone()).into()],
)), ));
n => Some(n.clone()), },
}; n => Some(n.clone()),
vec![(vec![], nopt, Pos::Range(name.range.clone()))] };
}, vec![(vec![], nopt, Pos::Range(name.range.clone()))]
Token::S(Paren::Square, b) => { },
let mut ok = Vec::new(); Token::S(Paren::Square, b) => {
b.iter().for_each(|tt| match &tt.tok { let mut ok = Vec::new();
Token::Name(n) if n.starts_with(op_char) => b.iter().for_each(|tt| match &tt.tok {
ok.push((vec![], Some(n.clone()), Pos::Range(tt.range.clone()))), Token::Name(n) if n.starts_with(op_char) =>
Token::BR | Token::Comment(_) => (), ok.push((vec![], Some(n.clone()), Pos::Range(tt.range.clone()))),
_ => ctx.report(mk_err( Token::BR | Token::Comment(_) => (),
intern!(str: "Non-operator in escapement in multiname"), _ => ctx.report(mk_err(
"In multinames, [] functions as a literal name list reserved for operators", intern!(str: "Non-operator in escapement in multiname"),
[Pos::Range(name.range.clone()).into()], "In multinames, [] functions as a literal name list reserved for operators",
)), [Pos::Range(name.range.clone()).into()],
}); )),
ok });
}, ok
Token::S(Paren::Round, b) => { },
let mut ok = Vec::new(); Token::S(Paren::Round, b) => {
let body = Snippet::new(name, b); let mut ok = Vec::new();
for csent in body.split(|n| matches!(n, Token::Name(n) if *n == comma)) { let body = Snippet::new(name, b);
match rec(ctx, csent) { for csent in body.split(|n| matches!(n, Token::Name(n) if *n == comma)) {
Err(e) => ctx.report(e), match rec(ctx, csent) {
Ok(Parsed { output, tail }) => match tail.get(0) { Err(e) => ctx.report(e),
None => ok.extend(output), Ok(Parsed { output, tail }) => match tail.get(0) {
Some(t) => ctx.report(mk_err( None => ok.extend(output),
intern!(str: "Unexpected token in multiname group"), Some(t) => ctx.report(mk_err(
"Unexpected token. Likely missing a :: or , or wanted [] instead of ()", intern!(str: "Unexpected token in multiname group"),
[Pos::Range(t.range.clone()).into()], "Unexpected token. Likely missing a :: or , or wanted [] instead of ()",
)), [Pos::Range(t.range.clone()).into()],
}, )),
} },
} }
ok }
}, ok
t => },
return Err(mk_errv( t => {
intern!(str: "Unrecognized name end"), return Err(mk_errv(
format!("Names cannot end with {t} tokens"), intern!(str: "Unrecognized name end"),
[Pos::Range(name.range.clone()).into()], format!("Names cannot end with {t} tokens"),
)), [Pos::Range(name.range.clone()).into()],
}; ));
Ok(Parsed { output, tail }) },
} };
} Ok(Parsed { output, tail })
ret.map(|Parsed { output, tail }| { }
let output = (output.into_iter()) }
.map(|(p, name, pos)| (Import { path: VPath::new(p.into_iter().rev()), name }, pos)) ret.map(|Parsed { output, tail }| {
.collect_vec(); let output = (output.into_iter())
Parsed { output, tail } .map(|(p, name, pos)| (Import { path: VPath::new(p.into_iter().rev()), name }, pos))
}) .collect_vec();
Parsed { output, tail }
})
} }
/// A compound name, possibly ending with a globstar /// A compound name, possibly ending with a globstar
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Import { pub struct Import {
pub path: VPath, pub path: VPath,
pub name: Option<Tok<String>>, pub name: Option<Tok<String>>,
} }
impl Import { impl Import {
// pub fn from_api(i: api::CompName) -> Self { // pub fn from_api(i: api::CompName) -> Self {
// Self { path: VPath::new(i.path.into_iter().map(deintern)), name: i.name.map(deintern) } // Self { path: VPath::new(i.path.into_iter().map(deintern)), name:
// } // i.name.map(deintern) } }
// pub fn to_api(&self) -> api::CompName { // pub fn to_api(&self) -> api::CompName {
// api::CompName { // api::CompName {
// path: self.path.iter().map(|t| t.marker()).collect(), // path: self.path.iter().map(|t| t.marker()).collect(),
// name: self.name.as_ref().map(|t| t.marker()), // name: self.name.as_ref().map(|t| t.marker()),
// } // }
// } // }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use never::Never; use never::Never;
use super::Snippet; use super::Snippet;
fn _covary_snip_a<'a, 'b>( fn _covary_snip_a<'a, 'b>(
x: Snippet<'static, 'b, Never, Never>, x: Snippet<'static, 'b, Never, Never>,
) -> Snippet<'a, 'b, Never, Never> { ) -> Snippet<'a, 'b, Never, Never> {
x x
} }
fn _covary_snip_b<'a, 'b>( fn _covary_snip_b<'a, 'b>(
x: Snippet<'a, 'static, Never, Never>, x: Snippet<'a, 'static, Never, Never>,
) -> Snippet<'a, 'b, Never, Never> { ) -> Snippet<'a, 'b, Never, Never> {
x x
} }
} }

View File

@@ -7,7 +7,7 @@ use std::iter;
/// Create a new vector consisting of the provided vector with the /// Create a new vector consisting of the provided vector with the
/// element appended. See [pushed_ref] to use it with a slice /// element appended. See [pushed_ref] to use it with a slice
pub fn pushed<I: IntoIterator, C: FromIterator<I::Item>>(vec: I, t: I::Item) -> C { pub fn pushed<I: IntoIterator, C: FromIterator<I::Item>>(vec: I, t: I::Item) -> C {
vec.into_iter().chain(iter::once(t)).collect() vec.into_iter().chain(iter::once(t)).collect()
} }
/// Pure version of [Vec::push] /// Pure version of [Vec::push]
@@ -15,21 +15,21 @@ pub fn pushed<I: IntoIterator, C: FromIterator<I::Item>>(vec: I, t: I::Item) ->
/// Create a new vector consisting of the provided slice with the /// Create a new vector consisting of the provided slice with the
/// element appended. See [pushed] for the owned version /// element appended. See [pushed] for the owned version
pub fn pushed_ref<'a, T: Clone + 'a, C: FromIterator<T>>( pub fn pushed_ref<'a, T: Clone + 'a, C: FromIterator<T>>(
vec: impl IntoIterator<Item = &'a T>, vec: impl IntoIterator<Item = &'a T>,
t: T, t: T,
) -> C { ) -> C {
vec.into_iter().cloned().chain(iter::once(t)).collect() vec.into_iter().cloned().chain(iter::once(t)).collect()
} }
/// Push an element on the adhoc stack, pass it to the callback, then pop the /// Push an element on the adhoc stack, pass it to the callback, then pop the
/// element out again. /// element out again.
pub fn with_pushed<T, U>( pub fn with_pushed<T, U>(
vec: &mut Vec<T>, vec: &mut Vec<T>,
item: T, item: T,
cb: impl for<'a> FnOnce(&'a mut Vec<T>) -> U, cb: impl for<'a> FnOnce(&'a mut Vec<T>) -> U,
) -> (T, U) { ) -> (T, U) {
vec.push(item); vec.push(item);
let out = cb(vec); let out = cb(vec);
let item = vec.pop().expect("top element stolen by callback"); let item = vec.pop().expect("top element stolen by callback");
(item, out) (item, out)
} }

View File

@@ -3,84 +3,84 @@ use std::cell::RefCell;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::ops::{BitAnd, Deref}; use std::ops::{BitAnd, Deref};
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{sync_channel, SyncSender}; use std::sync::mpsc::{SyncSender, sync_channel};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::{mem, thread}; use std::{mem, thread};
use derive_destructure::destructure; use derive_destructure::destructure;
use dyn_clone::{clone_box, DynClone}; use dyn_clone::{DynClone, clone_box};
use hashbrown::HashMap; use hashbrown::HashMap;
use orchid_api_traits::{Channel, Coding, Decode, Encode, MsgSet, Request}; use orchid_api_traits::{Channel, Coding, Decode, Encode, MsgSet, Request};
use trait_set::trait_set; use trait_set::trait_set;
pub struct Receipt; pub struct Receipt;
impl Receipt { impl Receipt {
pub fn off_thread(name: String, cb: impl FnOnce() -> Self + Send + 'static) -> Self { pub fn off_thread(name: String, cb: impl FnOnce() -> Self + Send + 'static) -> Self {
thread::Builder::new().name(name).spawn(cb).unwrap(); thread::Builder::new().name(name).spawn(cb).unwrap();
Self Self
} }
} }
trait_set! { trait_set! {
pub trait SendFn<T: MsgSet> = for<'a> FnMut(&'a [u8], ReqNot<T>) + DynClone + Send + 'static; pub trait SendFn<T: MsgSet> = for<'a> FnMut(&'a [u8], ReqNot<T>) + DynClone + Send + 'static;
pub trait ReqFn<T: MsgSet> = pub trait ReqFn<T: MsgSet> =
FnMut(RequestHandle<T>, <T::In as Channel>::Req) -> Receipt + DynClone + Send + Sync + 'static; FnMut(RequestHandle<T>, <T::In as Channel>::Req) -> Receipt + DynClone + Send + Sync + 'static;
pub trait NotifFn<T: MsgSet> = pub trait NotifFn<T: MsgSet> =
for<'a> FnMut(<T::In as Channel>::Notif, ReqNot<T>) + DynClone + Send + Sync + 'static; for<'a> FnMut(<T::In as Channel>::Notif, ReqNot<T>) + DynClone + Send + Sync + 'static;
} }
fn get_id(message: &[u8]) -> (u64, &[u8]) { fn get_id(message: &[u8]) -> (u64, &[u8]) {
(u64::from_be_bytes(message[..8].to_vec().try_into().unwrap()), &message[8..]) (u64::from_be_bytes(message[..8].to_vec().try_into().unwrap()), &message[8..])
} }
pub trait ReqHandlish { pub trait ReqHandlish {
fn defer_drop(&self, val: impl Any + 'static); fn defer_drop(&self, val: impl Any + 'static);
} }
#[derive(destructure)] #[derive(destructure)]
pub struct RequestHandle<MS: MsgSet> { pub struct RequestHandle<MS: MsgSet> {
defer_drop: RefCell<Vec<Box<dyn Any>>>, defer_drop: RefCell<Vec<Box<dyn Any>>>,
fulfilled: AtomicBool, fulfilled: AtomicBool,
id: u64, id: u64,
parent: ReqNot<MS>, parent: ReqNot<MS>,
} }
impl<MS: MsgSet + 'static> RequestHandle<MS> { impl<MS: MsgSet + 'static> RequestHandle<MS> {
fn new(parent: ReqNot<MS>, id: u64) -> Self { fn new(parent: ReqNot<MS>, id: u64) -> Self {
Self { defer_drop: RefCell::default(), fulfilled: false.into(), parent, id } Self { defer_drop: RefCell::default(), fulfilled: false.into(), parent, id }
} }
pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() } pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() }
pub fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt { self.respond(rep) } pub fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt { self.respond(rep) }
pub fn will_handle_as<U: Request>(&self, _: &U) -> ReqTypToken<U> { ReqTypToken(PhantomData) } pub fn will_handle_as<U: Request>(&self, _: &U) -> ReqTypToken<U> { ReqTypToken(PhantomData) }
pub fn handle_as<U: Request>(&self, _: ReqTypToken<U>, rep: &U::Response) -> Receipt { pub fn handle_as<U: Request>(&self, _: ReqTypToken<U>, rep: &U::Response) -> Receipt {
self.respond(rep) self.respond(rep)
} }
pub fn respond(&self, response: &impl Encode) -> Receipt { pub fn respond(&self, response: &impl Encode) -> Receipt {
assert!(!self.fulfilled.swap(true, Ordering::Relaxed), "Already responded to {}", self.id); assert!(!self.fulfilled.swap(true, Ordering::Relaxed), "Already responded to {}", self.id);
let mut buf = (!self.id).to_be_bytes().to_vec(); let mut buf = (!self.id).to_be_bytes().to_vec();
response.encode(&mut buf); response.encode(&mut buf);
let mut send = clone_box(&*self.reqnot().0.lock().unwrap().send); let mut send = clone_box(&*self.reqnot().0.lock().unwrap().send);
(send)(&buf, self.parent.clone()); (send)(&buf, self.parent.clone());
Receipt Receipt
} }
} }
impl<MS: MsgSet> ReqHandlish for RequestHandle<MS> { impl<MS: MsgSet> ReqHandlish for RequestHandle<MS> {
fn defer_drop(&self, val: impl Any) { self.defer_drop.borrow_mut().push(Box::new(val)) } fn defer_drop(&self, val: impl Any) { self.defer_drop.borrow_mut().push(Box::new(val)) }
} }
impl<MS: MsgSet> Drop for RequestHandle<MS> { impl<MS: MsgSet> Drop for RequestHandle<MS> {
fn drop(&mut self) { fn drop(&mut self) {
let done = self.fulfilled.load(Ordering::Relaxed); let done = self.fulfilled.load(Ordering::Relaxed);
debug_assert!(done, "Request {} dropped without response", self.id) debug_assert!(done, "Request {} dropped without response", self.id)
} }
} }
pub struct ReqTypToken<T>(PhantomData<T>); pub struct ReqTypToken<T>(PhantomData<T>);
pub struct ReqNotData<T: MsgSet> { pub struct ReqNotData<T: MsgSet> {
id: u64, id: u64,
send: Box<dyn SendFn<T>>, send: Box<dyn SendFn<T>>,
notif: Box<dyn NotifFn<T>>, notif: Box<dyn NotifFn<T>>,
req: Box<dyn ReqFn<T>>, req: Box<dyn ReqFn<T>>,
responses: HashMap<u64, SyncSender<Vec<u8>>>, responses: HashMap<u64, SyncSender<Vec<u8>>>,
} }
/// Wraps a raw message buffer to save on copying. /// Wraps a raw message buffer to save on copying.
@@ -88,180 +88,180 @@ pub struct ReqNotData<T: MsgSet> {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct RawReply(Vec<u8>); pub struct RawReply(Vec<u8>);
impl Deref for RawReply { impl Deref for RawReply {
type Target = [u8]; type Target = [u8];
fn deref(&self) -> &Self::Target { get_id(&self.0[..]).1 } fn deref(&self) -> &Self::Target { get_id(&self.0[..]).1 }
} }
pub struct ReqNot<T: MsgSet>(Arc<Mutex<ReqNotData<T>>>); pub struct ReqNot<T: MsgSet>(Arc<Mutex<ReqNotData<T>>>);
impl<T: MsgSet> ReqNot<T> { impl<T: MsgSet> ReqNot<T> {
pub fn new(send: impl SendFn<T>, notif: impl NotifFn<T>, req: impl ReqFn<T>) -> Self { pub fn new(send: impl SendFn<T>, notif: impl NotifFn<T>, req: impl ReqFn<T>) -> Self {
Self(Arc::new(Mutex::new(ReqNotData { Self(Arc::new(Mutex::new(ReqNotData {
id: 1, id: 1,
send: Box::new(send), send: Box::new(send),
notif: Box::new(notif), notif: Box::new(notif),
req: Box::new(req), req: Box::new(req),
responses: HashMap::new(), responses: HashMap::new(),
}))) })))
} }
/// Can be called from a polling thread or dispatched in any other way /// Can be called from a polling thread or dispatched in any other way
pub fn receive(&self, message: &[u8]) { pub fn receive(&self, message: &[u8]) {
let mut g = self.0.lock().unwrap(); let mut g = self.0.lock().unwrap();
let (id, payload) = get_id(message); let (id, payload) = get_id(message);
if id == 0 { if id == 0 {
let mut notif = clone_box(&*g.notif); let mut notif = clone_box(&*g.notif);
mem::drop(g); mem::drop(g);
notif(<T::In as Channel>::Notif::decode(&mut &payload[..]), self.clone()) notif(<T::In as Channel>::Notif::decode(&mut &payload[..]), self.clone())
} else if 0 < id.bitand(1 << 63) { } else if 0 < id.bitand(1 << 63) {
let sender = g.responses.remove(&!id).expect("Received response for invalid message"); let sender = g.responses.remove(&!id).expect("Received response for invalid message");
sender.send(message.to_vec()).unwrap(); sender.send(message.to_vec()).unwrap();
} else { } else {
let message = <T::In as Channel>::Req::decode(&mut &payload[..]); let message = <T::In as Channel>::Req::decode(&mut &payload[..]);
let mut req = clone_box(&*g.req); let mut req = clone_box(&*g.req);
mem::drop(g); mem::drop(g);
let rn = self.clone(); let rn = self.clone();
thread::Builder::new() thread::Builder::new()
.name(format!("request {id}")) .name(format!("request {id}"))
.spawn(move || req(RequestHandle::new(rn, id), message)) .spawn(move || req(RequestHandle::new(rn, id), message))
.unwrap(); .unwrap();
} }
} }
pub fn notify<N: Coding + Into<<T::Out as Channel>::Notif>>(&self, notif: N) { pub fn notify<N: Coding + Into<<T::Out as Channel>::Notif>>(&self, notif: N) {
let mut send = clone_box(&*self.0.lock().unwrap().send); let mut send = clone_box(&*self.0.lock().unwrap().send);
let mut buf = vec![0; 8]; let mut buf = vec![0; 8];
let msg: <T::Out as Channel>::Notif = notif.into(); let msg: <T::Out as Channel>::Notif = notif.into();
msg.encode(&mut buf); msg.encode(&mut buf);
send(&buf, self.clone()) send(&buf, self.clone())
} }
} }
pub trait DynRequester: Send + Sync { pub trait DynRequester: Send + Sync {
type Transfer; type Transfer;
/// Encode and send a request, then receive the response buffer. /// Encode and send a request, then receive the response buffer.
fn raw_request(&self, data: Self::Transfer) -> RawReply; fn raw_request(&self, data: Self::Transfer) -> RawReply;
} }
pub struct MappedRequester<'a, T>(Box<dyn Fn(T) -> RawReply + Send + Sync + 'a>); pub struct MappedRequester<'a, T>(Box<dyn Fn(T) -> RawReply + Send + Sync + 'a>);
impl<'a, T> MappedRequester<'a, T> { impl<'a, T> MappedRequester<'a, T> {
fn new<U: DynRequester + 'a>(req: U) -> Self fn new<U: DynRequester + 'a>(req: U) -> Self
where T: Into<U::Transfer> { where T: Into<U::Transfer> {
MappedRequester(Box::new(move |t| req.raw_request(t.into()))) MappedRequester(Box::new(move |t| req.raw_request(t.into())))
} }
} }
impl<T> DynRequester for MappedRequester<'_, T> { impl<T> DynRequester for MappedRequester<'_, T> {
type Transfer = T; type Transfer = T;
fn raw_request(&self, data: Self::Transfer) -> RawReply { self.0(data) } fn raw_request(&self, data: Self::Transfer) -> RawReply { self.0(data) }
} }
impl<T: MsgSet> DynRequester for ReqNot<T> { impl<T: MsgSet> DynRequester for ReqNot<T> {
type Transfer = <T::Out as Channel>::Req; type Transfer = <T::Out as Channel>::Req;
fn raw_request(&self, req: Self::Transfer) -> RawReply { fn raw_request(&self, req: Self::Transfer) -> RawReply {
let mut g = self.0.lock().unwrap(); let mut g = self.0.lock().unwrap();
let id = g.id; let id = g.id;
g.id += 1; g.id += 1;
let mut buf = id.to_be_bytes().to_vec(); let mut buf = id.to_be_bytes().to_vec();
req.encode(&mut buf); req.encode(&mut buf);
let (send, recv) = sync_channel(1); let (send, recv) = sync_channel(1);
g.responses.insert(id, send); g.responses.insert(id, send);
let mut send = clone_box(&*g.send); let mut send = clone_box(&*g.send);
mem::drop(g); mem::drop(g);
send(&buf, self.clone()); send(&buf, self.clone());
RawReply(recv.recv().unwrap()) RawReply(recv.recv().unwrap())
} }
} }
pub trait Requester: DynRequester { pub trait Requester: DynRequester {
#[must_use = "These types are subject to change with protocol versions. \ #[must_use = "These types are subject to change with protocol versions. \
If you don't want to use the return value, At a minimum, force the type."] If you don't want to use the return value, At a minimum, force the type."]
fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response; fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response;
fn map<'a, U: Into<Self::Transfer>>(self) -> MappedRequester<'a, U> fn map<'a, U: Into<Self::Transfer>>(self) -> MappedRequester<'a, U>
where Self: Sized + 'a { where Self: Sized + 'a {
MappedRequester::new(self) MappedRequester::new(self)
} }
} }
impl<This: DynRequester + ?Sized> Requester for This { impl<This: DynRequester + ?Sized> Requester for This {
fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response { fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response {
R::Response::decode(&mut &self.raw_request(data.into())[..]) R::Response::decode(&mut &self.raw_request(data.into())[..])
} }
} }
impl<T: MsgSet> Clone for ReqNot<T> { impl<T: MsgSet> Clone for ReqNot<T> {
fn clone(&self) -> Self { Self(self.0.clone()) } fn clone(&self) -> Self { Self(self.0.clone()) }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_api_traits::{Channel, Request}; use orchid_api_traits::{Channel, Request};
use super::{MsgSet, ReqNot}; use super::{MsgSet, ReqNot};
use crate::clone; use crate::clone;
use crate::reqnot::Requester as _; use crate::reqnot::Requester as _;
#[derive(Clone, Debug, Coding, PartialEq)] #[derive(Clone, Debug, Coding, PartialEq)]
pub struct TestReq(u8); pub struct TestReq(u8);
impl Request for TestReq { impl Request for TestReq {
type Response = u8; type Response = u8;
} }
pub struct TestChan; pub struct TestChan;
impl Channel for TestChan { impl Channel for TestChan {
type Notif = u8; type Notif = u8;
type Req = TestReq; type Req = TestReq;
} }
pub struct TestMsgSet; pub struct TestMsgSet;
impl MsgSet for TestMsgSet { impl MsgSet for TestMsgSet {
type In = TestChan; type In = TestChan;
type Out = TestChan; type Out = TestChan;
} }
#[test] #[test]
fn notification() { fn notification() {
let received = Arc::new(Mutex::new(None)); let received = Arc::new(Mutex::new(None));
let receiver = ReqNot::<TestMsgSet>::new( let receiver = ReqNot::<TestMsgSet>::new(
|_, _| panic!("Should not send anything"), |_, _| panic!("Should not send anything"),
clone!(received; move |notif, _| *received.lock().unwrap() = Some(notif)), clone!(received; move |notif, _| *received.lock().unwrap() = Some(notif)),
|_, _| panic!("Not receiving a request"), |_, _| panic!("Not receiving a request"),
); );
let sender = ReqNot::<TestMsgSet>::new( let sender = ReqNot::<TestMsgSet>::new(
clone!(receiver; move |d, _| receiver.receive(d)), clone!(receiver; move |d, _| receiver.receive(d)),
|_, _| panic!("Should not receive notif"), |_, _| panic!("Should not receive notif"),
|_, _| panic!("Should not receive request"), |_, _| panic!("Should not receive request"),
); );
sender.notify(3); sender.notify(3);
assert_eq!(*received.lock().unwrap(), Some(3)); assert_eq!(*received.lock().unwrap(), Some(3));
sender.notify(4); sender.notify(4);
assert_eq!(*received.lock().unwrap(), Some(4)); assert_eq!(*received.lock().unwrap(), Some(4));
} }
#[test] #[test]
fn request() { fn request() {
let receiver = Arc::new(Mutex::<Option<ReqNot<TestMsgSet>>>::new(None)); let receiver = Arc::new(Mutex::<Option<ReqNot<TestMsgSet>>>::new(None));
let sender = Arc::new(ReqNot::<TestMsgSet>::new( let sender = Arc::new(ReqNot::<TestMsgSet>::new(
{ {
let receiver = receiver.clone(); let receiver = receiver.clone();
move |d, _| receiver.lock().unwrap().as_ref().unwrap().receive(d) move |d, _| receiver.lock().unwrap().as_ref().unwrap().receive(d)
}, },
|_, _| panic!("Should not receive notif"), |_, _| panic!("Should not receive notif"),
|_, _| panic!("Should not receive request"), |_, _| panic!("Should not receive request"),
)); ));
*receiver.lock().unwrap() = Some(ReqNot::new( *receiver.lock().unwrap() = Some(ReqNot::new(
{ {
let sender = sender.clone(); let sender = sender.clone();
move |d, _| sender.receive(d) move |d, _| sender.receive(d)
}, },
|_, _| panic!("Not receiving notifs"), |_, _| panic!("Not receiving notifs"),
|hand, req| { |hand, req| {
assert_eq!(req, TestReq(5)); assert_eq!(req, TestReq(5));
hand.respond(&6u8) hand.respond(&6u8)
}, },
)); ));
let response = sender.request(TestReq(5)); let response = sender.request(TestReq(5));
assert_eq!(response, 6); assert_eq!(response, 6);
} }
} }

View File

@@ -8,20 +8,20 @@ use trait_set::trait_set;
use super::boxed_iter::BoxedIter; use super::boxed_iter::BoxedIter;
trait_set! { trait_set! {
trait Payload<'a, T> = Fn() -> BoxedIter<'a, T> + 'a; trait Payload<'a, T> = Fn() -> BoxedIter<'a, T> + 'a;
} }
/// Dynamic iterator building callback. Given how many trait objects this /// Dynamic iterator building callback. Given how many trait objects this
/// involves, it may actually be slower than C#. /// involves, it may actually be slower than C#.
pub struct Sequence<'a, T: 'a>(Rc<dyn Payload<'a, T>>); pub struct Sequence<'a, T: 'a>(Rc<dyn Payload<'a, T>>);
impl<'a, T: 'a> Sequence<'a, T> { impl<'a, T: 'a> Sequence<'a, T> {
/// Construct from a concrete function returning a concrete iterator /// Construct from a concrete function returning a concrete iterator
pub fn new<I: IntoIterator<Item = T> + 'a>(f: impl Fn() -> I + 'a) -> Self { pub fn new<I: IntoIterator<Item = T> + 'a>(f: impl Fn() -> I + 'a) -> Self {
Self(Rc::new(move || Box::new(f().into_iter()))) Self(Rc::new(move || Box::new(f().into_iter())))
} }
/// Get an iterator from the function /// Get an iterator from the function
pub fn iter(&self) -> BoxedIter<'_, T> { (self.0)() } pub fn iter(&self) -> BoxedIter<'_, T> { (self.0)() }
} }
impl<'a, T: 'a> Clone for Sequence<'a, T> { impl<'a, T: 'a> Clone for Sequence<'a, T> {
fn clone(&self) -> Self { Self(self.0.clone()) } fn clone(&self) -> Self { Self(self.0.clone()) }
} }

View File

@@ -10,88 +10,88 @@ use crate::boxed_iter::BoxedIter;
/// are technically usable for this purpose, they're very easy to confuse /// are technically usable for this purpose, they're very easy to confuse
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Side { pub enum Side {
/// Left, low, or high-to-low in the case of sequences /// Left, low, or high-to-low in the case of sequences
Left, Left,
/// Right, high, or low-to-high in the case of sequences /// Right, high, or low-to-high in the case of sequences
Right, Right,
} }
impl fmt::Display for Side { impl fmt::Display for Side {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
Self::Left => write!(f, "Left"), Self::Left => write!(f, "Left"),
Self::Right => write!(f, "Right"), Self::Right => write!(f, "Right"),
} }
} }
} }
impl Side { impl Side {
/// Get the side that is not the current one /// Get the side that is not the current one
pub fn opposite(&self) -> Self { pub fn opposite(&self) -> Self {
match self { match self {
Self::Left => Self::Right, Self::Left => Self::Right,
Self::Right => Self::Left, Self::Right => Self::Left,
} }
} }
/// Shorthand for opposite /// Shorthand for opposite
pub fn inv(&self) -> Self { self.opposite() } pub fn inv(&self) -> Self { self.opposite() }
/// take N elements from this end of a slice /// take N elements from this end of a slice
pub fn slice<'a, T>(&self, size: usize, slice: &'a [T]) -> &'a [T] { pub fn slice<'a, T>(&self, size: usize, slice: &'a [T]) -> &'a [T] {
match self { match self {
Side::Left => &slice[..size], Side::Left => &slice[..size],
Side::Right => &slice[slice.len() - size..], Side::Right => &slice[slice.len() - size..],
} }
} }
/// ignore N elements from this end of a slice /// ignore N elements from this end of a slice
pub fn crop<'a, T>(&self, margin: usize, slice: &'a [T]) -> &'a [T] { pub fn crop<'a, T>(&self, margin: usize, slice: &'a [T]) -> &'a [T] {
self.opposite().slice(slice.len() - margin, slice) self.opposite().slice(slice.len() - margin, slice)
} }
/// ignore N elements from this end and M elements from the other end /// ignore N elements from this end and M elements from the other end
/// of a slice /// of a slice
pub fn crop_both<'a, T>(&self, margin: usize, opposite: usize, slice: &'a [T]) -> &'a [T] { pub fn crop_both<'a, T>(&self, margin: usize, opposite: usize, slice: &'a [T]) -> &'a [T] {
self.crop(margin, self.opposite().crop(opposite, slice)) self.crop(margin, self.opposite().crop(opposite, slice))
} }
/// Pick this side from a pair of things /// Pick this side from a pair of things
pub fn pick<T>(&self, pair: (T, T)) -> T { pub fn pick<T>(&self, pair: (T, T)) -> T {
match self { match self {
Side::Left => pair.0, Side::Left => pair.0,
Side::Right => pair.1, Side::Right => pair.1,
} }
} }
/// Make a pair with the first element on this side /// Make a pair with the first element on this side
pub fn pair<T>(&self, this: T, opposite: T) -> (T, T) { pub fn pair<T>(&self, this: T, opposite: T) -> (T, T) {
match self { match self {
Side::Left => (this, opposite), Side::Left => (this, opposite),
Side::Right => (opposite, this), Side::Right => (opposite, this),
} }
} }
/// Walk a double ended iterator (assumed to be left-to-right) in this /// Walk a double ended iterator (assumed to be left-to-right) in this
/// direction /// direction
pub fn walk<'a, I: DoubleEndedIterator + 'a>(&self, iter: I) -> BoxedIter<'a, I::Item> { pub fn walk<'a, I: DoubleEndedIterator + 'a>(&self, iter: I) -> BoxedIter<'a, I::Item> {
match self { match self {
Side::Right => Box::new(iter) as BoxedIter<I::Item>, Side::Right => Box::new(iter) as BoxedIter<I::Item>,
Side::Left => Box::new(iter.rev()), Side::Left => Box::new(iter.rev()),
} }
} }
} }
impl Not for Side { impl Not for Side {
type Output = Side; type Output = Side;
fn not(self) -> Self::Output { self.opposite() } fn not(self) -> Self::Output { self.opposite() }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use itertools::Itertools; use itertools::Itertools;
use super::*; use super::*;
/// I apparently have a tendency to mix these up so it's best if /// I apparently have a tendency to mix these up so it's best if
/// the sides are explicitly stated /// the sides are explicitly stated
#[test] #[test]
fn test_walk() { fn test_walk() {
assert_eq!(Side::Right.walk(0..4).collect_vec(), vec![0, 1, 2, 3], "can walk a range"); assert_eq!(Side::Right.walk(0..4).collect_vec(), vec![0, 1, 2, 3], "can walk a range");
assert_eq!(Side::Left.walk(0..4).collect_vec(), vec![3, 2, 1, 0], "can walk a range backwards") assert_eq!(Side::Left.walk(0..4).collect_vec(), vec![3, 2, 1, 0], "can walk a range backwards")
} }
} }

View File

@@ -3,4 +3,4 @@ pub use api::Paren;
use crate::api; use crate::api;
pub const PARENS: &[(char, char, Paren)] = pub const PARENS: &[(char, char, Paren)] =
&[('(', ')', Paren::Round), ('[', ']', Paren::Square), ('{', '}', Paren::Curly)]; &[('(', ')', Paren::Round), ('[', ']', Paren::Square), ('{', '}', Paren::Curly)];

View File

@@ -6,320 +6,319 @@ use std::marker::PhantomData;
use std::ops::Range; use std::ops::Range;
use std::sync::Arc; use std::sync::Arc;
pub use api::PhKind;
use itertools::Itertools; use itertools::Itertools;
use never::Never; use never::Never;
use ordered_float::NotNan; use ordered_float::NotNan;
use trait_set::trait_set; use trait_set::trait_set;
use crate::{api, match_mapping};
use crate::error::OrcErrv; use crate::error::OrcErrv;
use crate::interner::Tok; use crate::interner::Tok;
use crate::location::Pos; use crate::location::Pos;
use crate::name::PathSlice; use crate::name::PathSlice;
use crate::parse::Snippet; use crate::parse::Snippet;
use crate::tokens::PARENS; use crate::tokens::PARENS;
use crate::{api, match_mapping};
pub use api::PhKind as PhKind;
trait_set! { trait_set! {
pub trait RecurCB<'a, A: AtomRepr, X: ExtraTok> = Fn(TokTree<'a, A, X>) -> TokTree<'a, A, X>; pub trait RecurCB<'a, A: AtomRepr, X: ExtraTok> = Fn(TokTree<'a, A, X>) -> TokTree<'a, A, X>;
pub trait ExtraTok = Display + Clone + fmt::Debug; pub trait ExtraTok = Display + Clone + fmt::Debug;
} }
pub fn recur<'a, A: AtomRepr, X: ExtraTok>( pub fn recur<'a, A: AtomRepr, X: ExtraTok>(
tt: TokTree<'a, A, X>, tt: TokTree<'a, A, X>,
f: &impl Fn(TokTree<'a, A, X>, &dyn RecurCB<'a, A, X>) -> TokTree<'a, A, X>, f: &impl Fn(TokTree<'a, A, X>, &dyn RecurCB<'a, A, X>) -> TokTree<'a, A, X>,
) -> TokTree<'a, A, X> { ) -> TokTree<'a, A, X> {
f(tt, &|TokTree { range, tok }| { f(tt, &|TokTree { range, tok }| {
let tok = match tok { let tok = match tok {
tok @ (Token::Atom(_) | Token::BR | Token::Bottom(_) | Token::Comment(_) | Token::NS) => tok, tok @ (Token::Atom(_) | Token::BR | Token::Bottom(_) | Token::Comment(_) | Token::NS) => tok,
tok @ (Token::Name(_) | Token::Slot(_) | Token::X(_) | Token::Ph(_) | Token::Macro(_)) => tok, tok @ (Token::Name(_) | Token::Slot(_) | Token::X(_) | Token::Ph(_) | Token::Macro(_)) => tok,
Token::LambdaHead(arg) => Token::LambdaHead(arg) =>
Token::LambdaHead(arg.into_iter().map(|tt| recur(tt, f)).collect_vec()), Token::LambdaHead(arg.into_iter().map(|tt| recur(tt, f)).collect_vec()),
Token::S(p, b) => Token::S(p, b.into_iter().map(|tt| recur(tt, f)).collect_vec()), Token::S(p, b) => Token::S(p, b.into_iter().map(|tt| recur(tt, f)).collect_vec()),
}; };
TokTree { range, tok } TokTree { range, tok }
}) })
} }
pub trait AtomRepr: fmt::Display + Clone + fmt::Debug { pub trait AtomRepr: fmt::Display + Clone + fmt::Debug {
type Ctx: ?Sized; type Ctx: ?Sized;
fn from_api(api: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self; fn from_api(api: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self;
fn to_api(&self) -> orchid_api::Atom; fn to_api(&self) -> orchid_api::Atom;
} }
impl AtomRepr for Never { impl AtomRepr for Never {
type Ctx = Never; type Ctx = Never;
fn from_api(_: &api::Atom, _: Pos, _: &mut Self::Ctx) -> Self { panic!() } fn from_api(_: &api::Atom, _: Pos, _: &mut Self::Ctx) -> Self { panic!() }
fn to_api(&self) -> orchid_api::Atom { match *self {} } fn to_api(&self) -> orchid_api::Atom { match *self {} }
} }
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct TokHandle<'a>(api::TreeTicket, PhantomData<&'a ()>); pub struct TokHandle<'a>(api::TreeTicket, PhantomData<&'a ()>);
impl TokHandle<'static> { impl TokHandle<'static> {
pub fn new(tt: api::TreeTicket) -> Self { TokHandle(tt, PhantomData) } pub fn new(tt: api::TreeTicket) -> Self { TokHandle(tt, PhantomData) }
} }
impl TokHandle<'_> { impl TokHandle<'_> {
pub fn ticket(self) -> api::TreeTicket { self.0 } pub fn ticket(self) -> api::TreeTicket { self.0 }
} }
impl Display for TokHandle<'_> { impl Display for TokHandle<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Handle({})", self.0.0) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Handle({})", self.0.0) }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct TokTree<'a, A: AtomRepr, X: ExtraTok> { pub struct TokTree<'a, A: AtomRepr, X: ExtraTok> {
pub tok: Token<'a, A, X>, pub tok: Token<'a, A, X>,
pub range: Range<u32>, pub range: Range<u32>,
} }
impl<'a, A: AtomRepr, X: ExtraTok> TokTree<'a, A, X> { impl<'a, A: AtomRepr, X: ExtraTok> TokTree<'a, A, X> {
pub fn from_api(tt: &api::TokenTree, ctx: &mut A::Ctx) -> Self { pub fn from_api(tt: &api::TokenTree, ctx: &mut A::Ctx) -> Self {
let tok = match_mapping!(&tt.token, api::Token => Token::<'a, A, X> { let tok = match_mapping!(&tt.token, api::Token => Token::<'a, A, X> {
BR, NS, BR, NS,
Atom(a => A::from_api(a, Pos::Range(tt.range.clone()), ctx)), Atom(a => A::from_api(a, Pos::Range(tt.range.clone()), ctx)),
Bottom(e => OrcErrv::from_api(e)), Bottom(e => OrcErrv::from_api(e)),
LambdaHead(arg => ttv_from_api(arg, ctx)), LambdaHead(arg => ttv_from_api(arg, ctx)),
Name(n => Tok::from_api(*n)), Name(n => Tok::from_api(*n)),
S(*par, b => ttv_from_api(b, ctx)), S(*par, b => ttv_from_api(b, ctx)),
Comment(c.clone()), Comment(c.clone()),
Slot(id => TokHandle::new(*id)), Slot(id => TokHandle::new(*id)),
Ph(ph => Ph::from_api(ph)), Ph(ph => Ph::from_api(ph)),
Macro(*prio) Macro(*prio)
}); });
Self { range: tt.range.clone(), tok } Self { range: tt.range.clone(), tok }
} }
pub fn to_api( pub fn to_api(
&self, &self,
do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree, do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree,
) -> api::TokenTree { ) -> api::TokenTree {
let token = match_mapping!(&self.tok, Token => api::Token { let token = match_mapping!(&self.tok, Token => api::Token {
Atom(a.to_api()), Atom(a.to_api()),
BR, BR,
NS, NS,
Bottom(e.to_api()), Bottom(e.to_api()),
Comment(c.clone()), Comment(c.clone()),
LambdaHead(arg => ttv_to_api(arg, do_extra)), LambdaHead(arg => ttv_to_api(arg, do_extra)),
Name(n.to_api()), Name(n.to_api()),
Slot(tt.ticket()), Slot(tt.ticket()),
S(*p, b => ttv_to_api(b, do_extra)), S(*p, b => ttv_to_api(b, do_extra)),
Ph(ph.to_api()), Ph(ph.to_api()),
Macro(*prio), Macro(*prio),
} { } {
Token::X(x) => return do_extra(x, self.range.clone()) Token::X(x) => return do_extra(x, self.range.clone())
}); });
api::TokenTree { range: self.range.clone(), token } api::TokenTree { range: self.range.clone(), token }
} }
pub fn into_api( pub fn into_api(
self, self,
do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree, do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree,
) -> api::TokenTree { ) -> api::TokenTree {
let token = match self.tok { let token = match self.tok {
Token::Atom(a) => api::Token::Atom(a.to_api()), Token::Atom(a) => api::Token::Atom(a.to_api()),
Token::BR => api::Token::BR, Token::BR => api::Token::BR,
Token::NS => api::Token::NS, Token::NS => api::Token::NS,
Token::Bottom(e) => api::Token::Bottom(e.to_api()), Token::Bottom(e) => api::Token::Bottom(e.to_api()),
Token::Comment(c) => api::Token::Comment(c.clone()), Token::Comment(c) => api::Token::Comment(c.clone()),
Token::LambdaHead(arg) => api::Token::LambdaHead(ttv_into_api(arg, do_extra)), Token::LambdaHead(arg) => api::Token::LambdaHead(ttv_into_api(arg, do_extra)),
Token::Name(n) => api::Token::Name(n.to_api()), Token::Name(n) => api::Token::Name(n.to_api()),
Token::Slot(tt) => api::Token::Slot(tt.ticket()), Token::Slot(tt) => api::Token::Slot(tt.ticket()),
Token::S(p, b) => api::Token::S(p, ttv_into_api(b, do_extra)), Token::S(p, b) => api::Token::S(p, ttv_into_api(b, do_extra)),
Token::Ph(Ph { kind, name }) => Token::Ph(Ph { kind, name }) =>
api::Token::Ph(api::Placeholder { name: name.to_api(), kind }), api::Token::Ph(api::Placeholder { name: name.to_api(), kind }),
Token::X(x) => return do_extra(x, self.range.clone()), Token::X(x) => return do_extra(x, self.range.clone()),
Token::Macro(prio) => api::Token::Macro(prio), Token::Macro(prio) => api::Token::Macro(prio),
}; };
api::TokenTree { range: self.range.clone(), token } api::TokenTree { range: self.range.clone(), token }
} }
pub fn is_kw(&self, tk: Tok<String>) -> bool { self.tok.is_kw(tk) } pub fn is_kw(&self, tk: Tok<String>) -> bool { self.tok.is_kw(tk) }
pub fn as_name(&self) -> Option<Tok<String>> { pub fn as_name(&self) -> Option<Tok<String>> {
if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None } if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None }
} }
pub fn as_s(&self, par: Paren) -> Option<Snippet<'_, 'a, A, X>> { pub fn as_s(&self, par: Paren) -> Option<Snippet<'_, 'a, A, X>> {
self.tok.as_s(par).map(|slc| Snippet::new(self, slc)) self.tok.as_s(par).map(|slc| Snippet::new(self, slc))
} }
pub fn lambda(arg: Vec<Self>, mut body: Vec<Self>) -> Self { pub fn lambda(arg: Vec<Self>, mut body: Vec<Self>) -> Self {
let arg_range = ttv_range(&arg); let arg_range = ttv_range(&arg);
let s_range = arg_range.start..body.last().expect("Lambda with empty body!").range.end; let s_range = arg_range.start..body.last().expect("Lambda with empty body!").range.end;
body.insert(0, Token::LambdaHead(arg).at(arg_range)); body.insert(0, Token::LambdaHead(arg).at(arg_range));
Token::S(Paren::Round, body).at(s_range) Token::S(Paren::Round, body).at(s_range)
} }
} }
impl<A: AtomRepr, X: ExtraTok> Display for TokTree<'_, A, X> { impl<A: AtomRepr, X: ExtraTok> Display for TokTree<'_, A, X> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.tok) } fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.tok) }
} }
pub fn ttv_from_api<A: AtomRepr, X: ExtraTok>( pub fn ttv_from_api<A: AtomRepr, X: ExtraTok>(
tokv: impl IntoIterator<Item: Borrow<api::TokenTree>>, tokv: impl IntoIterator<Item: Borrow<api::TokenTree>>,
ctx: &mut A::Ctx, ctx: &mut A::Ctx,
) -> Vec<TokTree<'static, A, X>> { ) -> Vec<TokTree<'static, A, X>> {
tokv.into_iter().map(|t| TokTree::<A, X>::from_api(t.borrow(), ctx)).collect() tokv.into_iter().map(|t| TokTree::<A, X>::from_api(t.borrow(), ctx)).collect()
} }
pub fn ttv_to_api<'a, A: AtomRepr, X: ExtraTok>( pub fn ttv_to_api<'a, A: AtomRepr, X: ExtraTok>(
tokv: impl IntoIterator<Item: Borrow<TokTree<'a, A, X>>>, tokv: impl IntoIterator<Item: Borrow<TokTree<'a, A, X>>>,
do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree, do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree,
) -> Vec<api::TokenTree> { ) -> Vec<api::TokenTree> {
tokv.into_iter().map(|tok| Borrow::<TokTree<A, X>>::borrow(&tok).to_api(do_extra)).collect_vec() tokv.into_iter().map(|tok| Borrow::<TokTree<A, X>>::borrow(&tok).to_api(do_extra)).collect_vec()
} }
pub fn ttv_into_api<'a, A: AtomRepr, X: ExtraTok>( pub fn ttv_into_api<'a, A: AtomRepr, X: ExtraTok>(
tokv: impl IntoIterator<Item = TokTree<'a, A, X>>, tokv: impl IntoIterator<Item = TokTree<'a, A, X>>,
do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree, do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree,
) -> Vec<api::TokenTree> { ) -> Vec<api::TokenTree> {
tokv.into_iter().map(|t| t.into_api(do_extra)).collect_vec() tokv.into_iter().map(|t| t.into_api(do_extra)).collect_vec()
} }
/// This takes a position and not a range because it assigns the range to /// This takes a position and not a range because it assigns the range to
/// multiple leaf tokens, which is only valid if it's a zero-width range /// multiple leaf tokens, which is only valid if it's a zero-width range
pub fn vname_tv<'a: 'b, 'b, A: AtomRepr + 'a, X: ExtraTok + 'a>( pub fn vname_tv<'a: 'b, 'b, A: AtomRepr + 'a, X: ExtraTok + 'a>(
name: &'b PathSlice, name: &'b PathSlice,
pos: u32, pos: u32,
) -> impl Iterator<Item = TokTree<'a, A, X>> + 'b { ) -> impl Iterator<Item = TokTree<'a, A, X>> + 'b {
let (head, tail) = name.split_first().expect("Empty vname"); let (head, tail) = name.split_first().expect("Empty vname");
iter::once(Token::Name(head.clone())) iter::once(Token::Name(head.clone()))
.chain(tail.iter().flat_map(|t| [Token::NS, Token::Name(t.clone())])) .chain(tail.iter().flat_map(|t| [Token::NS, Token::Name(t.clone())]))
.map(move |t| t.at(pos..pos)) .map(move |t| t.at(pos..pos))
} }
pub fn wrap_tokv<'a, A: AtomRepr, X: ExtraTok>( pub fn wrap_tokv<'a, A: AtomRepr, X: ExtraTok>(
items: impl IntoIterator<Item = TokTree<'a, A, X>> items: impl IntoIterator<Item = TokTree<'a, A, X>>,
) -> TokTree<'a, A, X> { ) -> TokTree<'a, A, X> {
let items_v = items.into_iter().collect_vec(); let items_v = items.into_iter().collect_vec();
match items_v.len() { match items_v.len() {
0 => panic!("A tokv with no elements is illegal"), 0 => panic!("A tokv with no elements is illegal"),
1 => items_v.into_iter().next().unwrap(), 1 => items_v.into_iter().next().unwrap(),
_ => { _ => {
let range = items_v.first().unwrap().range.start..items_v.last().unwrap().range.end; let range = items_v.first().unwrap().range.start..items_v.last().unwrap().range.end;
Token::S(api::Paren::Round, items_v).at(range) Token::S(api::Paren::Round, items_v).at(range)
}, },
} }
} }
pub use api::Paren; pub use api::Paren;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum Token<'a, A: AtomRepr, X: ExtraTok> { pub enum Token<'a, A: AtomRepr, X: ExtraTok> {
Comment(Arc<String>), Comment(Arc<String>),
LambdaHead(Vec<TokTree<'a, A, X>>), LambdaHead(Vec<TokTree<'a, A, X>>),
Name(Tok<String>), Name(Tok<String>),
NS, NS,
BR, BR,
S(Paren, Vec<TokTree<'a, A, X>>), S(Paren, Vec<TokTree<'a, A, X>>),
Atom(A), Atom(A),
Bottom(OrcErrv), Bottom(OrcErrv),
Slot(TokHandle<'a>), Slot(TokHandle<'a>),
X(X), X(X),
Ph(Ph), Ph(Ph),
Macro(Option<NotNan<f64>>), Macro(Option<NotNan<f64>>),
} }
impl<'a, A: AtomRepr, X: ExtraTok> Token<'a, A, X> { impl<'a, A: AtomRepr, X: ExtraTok> Token<'a, A, X> {
pub fn at(self, range: Range<u32>) -> TokTree<'a, A, X> { TokTree { range, tok: self } } pub fn at(self, range: Range<u32>) -> TokTree<'a, A, X> { TokTree { range, tok: self } }
pub fn is_kw(&self, tk: Tok<String>) -> bool { pub fn is_kw(&self, tk: Tok<String>) -> bool { matches!(self, Token::Name(n) if *n == tk) }
matches!(self, Token::Name(n) if *n == tk) pub fn as_s(&self, par: Paren) -> Option<&[TokTree<'a, A, X>]> {
} match self {
pub fn as_s(&self, par: Paren) -> Option<&[TokTree<'a, A, X>]> { Self::S(p, b) if *p == par => Some(b),
match self { _ => None,
Self::S(p, b) if *p == par => Some(b), }
_ => None, }
}
}
} }
impl<A: AtomRepr, X: ExtraTok> Display for Token<'_, A, X> { impl<A: AtomRepr, X: ExtraTok> Display for Token<'_, A, X> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
thread_local! { thread_local! {
static PAREN_LEVEL: RefCell<usize> = 0.into(); static PAREN_LEVEL: RefCell<usize> = 0.into();
} }
fn get_indent() -> usize { PAREN_LEVEL.with_borrow(|t| *t) } fn get_indent() -> usize { PAREN_LEVEL.with_borrow(|t| *t) }
fn with_indent<T>(f: impl FnOnce() -> T) -> T { fn with_indent<T>(f: impl FnOnce() -> T) -> T {
PAREN_LEVEL.with_borrow_mut(|t| *t += 1); PAREN_LEVEL.with_borrow_mut(|t| *t += 1);
let r = f(); let r = f();
PAREN_LEVEL.with_borrow_mut(|t| *t -= 1); PAREN_LEVEL.with_borrow_mut(|t| *t -= 1);
r r
} }
match self { match self {
Self::Atom(a) => f.write_str(&indent(&format!("{a} "), get_indent(), false)), Self::Atom(a) => f.write_str(&indent(&format!("{a} "), get_indent(), false)),
Self::BR => write!(f, "\n{}", " ".repeat(get_indent())), Self::BR => write!(f, "\n{}", " ".repeat(get_indent())),
Self::Bottom(err) if err.len() == 1 => write!(f, "Bottom({}) ", err.one().unwrap()), Self::Bottom(err) if err.len() == 1 => write!(f, "Bottom({}) ", err.one().unwrap()),
Self::Bottom(err) => { Self::Bottom(err) => {
write!(f, "Botttom(\n{}) ", indent(&err.to_string(), get_indent() + 1, true)) write!(f, "Botttom(\n{}) ", indent(&err.to_string(), get_indent() + 1, true))
}, },
Self::Comment(c) => write!(f, "--[{c}]-- "), Self::Comment(c) => write!(f, "--[{c}]-- "),
Self::LambdaHead(arg) => with_indent(|| write!(f, "\\ {} . ", ttv_fmt(arg))), Self::LambdaHead(arg) => with_indent(|| write!(f, "\\ {} . ", ttv_fmt(arg))),
Self::NS => f.write_str(":: "), Self::NS => f.write_str(":: "),
Self::Name(n) => write!(f, "{n} "), Self::Name(n) => write!(f, "{n} "),
Self::Slot(th) => write!(f, "{th} "), Self::Slot(th) => write!(f, "{th} "),
Self::Ph(Ph { kind, name }) => match &kind { Self::Ph(Ph { kind, name }) => match &kind {
PhKind::Scalar => write!(f, "${name}"), PhKind::Scalar => write!(f, "${name}"),
PhKind::Vector { at_least_one, priority } => { PhKind::Vector { at_least_one, priority } => {
if *at_least_one { write!(f, ".")? } if *at_least_one {
write!(f, "..${name}")?; write!(f, ".")?
if 0 < *priority { write!(f, "{priority}") } else { Ok(()) } }
} write!(f, "..${name}")?;
} if 0 < *priority { write!(f, "{priority}") } else { Ok(()) }
Self::S(p, b) => { },
let (lp, rp, _) = PARENS.iter().find(|(_, _, par)| par == p).unwrap(); },
write!(f, "{lp} ")?; Self::S(p, b) => {
with_indent(|| f.write_str(&ttv_fmt(b)))?; let (lp, rp, _) = PARENS.iter().find(|(_, _, par)| par == p).unwrap();
write!(f, "{rp} ") write!(f, "{lp} ")?;
}, with_indent(|| f.write_str(&ttv_fmt(b)))?;
Self::X(x) => write!(f, "{x} "), write!(f, "{rp} ")
Self::Macro(None) => write!(f, "macro "), },
Self::Macro(Some(prio)) => write!(f, "macro({prio})"), Self::X(x) => write!(f, "{x} "),
} Self::Macro(None) => write!(f, "macro "),
} Self::Macro(Some(prio)) => write!(f, "macro({prio})"),
}
}
} }
pub fn ttv_range(ttv: &[TokTree<'_, impl AtomRepr, impl ExtraTok>]) -> Range<u32> { pub fn ttv_range(ttv: &[TokTree<'_, impl AtomRepr, impl ExtraTok>]) -> Range<u32> {
assert!(!ttv.is_empty(), "Empty slice has no range"); assert!(!ttv.is_empty(), "Empty slice has no range");
ttv.first().unwrap().range.start..ttv.last().unwrap().range.end ttv.first().unwrap().range.start..ttv.last().unwrap().range.end
} }
pub fn ttv_fmt<'a: 'b, 'b>( pub fn ttv_fmt<'a: 'b, 'b>(
ttv: impl IntoIterator<Item = &'b TokTree<'a, impl AtomRepr + 'b, impl ExtraTok + 'b>>, ttv: impl IntoIterator<Item = &'b TokTree<'a, impl AtomRepr + 'b, impl ExtraTok + 'b>>,
) -> String { ) -> String {
ttv.into_iter().join("") ttv.into_iter().join("")
} }
pub fn indent(s: &str, lvl: usize, first: bool) -> String { pub fn indent(s: &str, lvl: usize, first: bool) -> String {
if first { if first {
s.replace("\n", &("\n".to_string() + &" ".repeat(lvl))) s.replace("\n", &("\n".to_string() + &" ".repeat(lvl)))
} else if let Some((fst, rest)) = s.split_once('\n') { } else if let Some((fst, rest)) = s.split_once('\n') {
fst.to_string() + "\n" + &indent(rest, lvl, true) fst.to_string() + "\n" + &indent(rest, lvl, true)
} else { } else {
s.to_string() s.to_string()
} }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Ph { pub struct Ph {
pub name: Tok<String>, pub name: Tok<String>,
pub kind: PhKind, pub kind: PhKind,
} }
impl Ph { impl Ph {
pub fn from_api(api: &api::Placeholder) -> Self { pub fn from_api(api: &api::Placeholder) -> Self {
Self { name: Tok::from_api(api.name), kind: api.kind } Self { name: Tok::from_api(api.name), kind: api.kind }
} }
pub fn to_api(&self) -> api::Placeholder { pub fn to_api(&self) -> api::Placeholder {
api::Placeholder { name: self.name.to_api(), kind: self.kind } api::Placeholder { name: self.name.to_api(), kind: self.kind }
} }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
#[test] #[test]
fn test_covariance() { fn test_covariance() {
fn _f<'a>(x: Token<'static, Never, Never>) -> Token<'a, Never, Never> { x } fn _f<'a>(x: Token<'static, Never, Never>) -> Token<'a, Never, Never> { x }
} }
#[test] #[test]
fn fail_covariance() { fn fail_covariance() {
// this fails to compile // this fails to compile
// fn _f<'a, 'b>(x: &'a mut &'static ()) -> &'a mut &'b () { x } // fn _f<'a, 'b>(x: &'a mut &'static ()) -> &'a mut &'b () { x }
// this passes because it's covariant // this passes because it's covariant
fn _f<'a, 'b>(x: &'a fn() -> &'static ()) -> &'a fn() -> &'b () { x } fn _f<'a, 'b>(x: &'a fn() -> &'static ()) -> &'a fn() -> &'b () { x }
} }
} }

View File

@@ -11,15 +11,15 @@ derive_destructure = "1.0.0"
dyn-clone = "1.0.17" dyn-clone = "1.0.17"
hashbrown = "0.15.2" hashbrown = "0.15.2"
itertools = "0.14.0" itertools = "0.14.0"
konst = "0.3.9" konst = "0.3.16"
lazy_static = "1.5.0" lazy_static = "1.5.0"
never = "0.1.0" never = "0.1.0"
once_cell = "1.19.0" once_cell = "1.20.2"
orchid-api = { version = "0.1.0", path = "../orchid-api" } orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "4.2.0" ordered-float = "4.6.0"
paste = "1.0.15" paste = "1.0.15"
substack = "1.1.0" substack = "1.1.1"
trait-set = "0.3.0" trait-set = "0.3.0"

View File

@@ -1,13 +1,13 @@
use std::any::{type_name, Any, TypeId}; use std::any::{Any, TypeId, type_name};
use std::fmt; use std::fmt;
use std::io::{Read, Write}; use std::io::{Read, Write};
use std::marker::PhantomData; use std::marker::PhantomData;
use std::ops::Deref; use std::ops::Deref;
use std::sync::{Arc, OnceLock}; use std::sync::{Arc, OnceLock};
use dyn_clone::{clone_box, DynClone}; use dyn_clone::{DynClone, clone_box};
use orchid_api_traits::{enc_vec, Coding, Decode, Encode, Request}; use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec};
use orchid_base::error::{mk_err, OrcErr, OrcRes}; use orchid_base::error::{OrcErr, OrcRes, mk_err};
use orchid_base::intern; use orchid_base::intern;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
@@ -18,241 +18,247 @@ use trait_set::trait_set;
use crate::api; use crate::api;
// use crate::error::{ProjectError, ProjectResult}; // use crate::error::{ProjectError, ProjectResult};
use crate::expr::{Expr, ExprData, ExprHandle, ExprKind}; use crate::expr::{Expr, ExprData, ExprHandle, ExprKind};
use crate::system::{atom_info_for, downcast_atom, DynSystemCard, SysCtx}; use crate::system::{DynSystemCard, SysCtx, atom_info_for, downcast_atom};
pub trait AtomCard: 'static + Sized { pub trait AtomCard: 'static + Sized {
type Data: Clone + Coding + Sized; type Data: Clone + Coding + Sized;
} }
pub trait AtomicVariant {} pub trait AtomicVariant {}
pub trait Atomic: 'static + Sized { pub trait Atomic: 'static + Sized {
type Variant: AtomicVariant; type Variant: AtomicVariant;
type Data: Clone + Coding + Sized; type Data: Clone + Coding + Sized;
fn reg_reqs() -> MethodSet<Self>; fn reg_reqs() -> MethodSet<Self>;
} }
impl<A: Atomic> AtomCard for A { impl<A: Atomic> AtomCard for A {
type Data = <Self as Atomic>::Data; type Data = <Self as Atomic>::Data;
} }
pub trait AtomicFeatures: Atomic { pub trait AtomicFeatures: Atomic {
fn factory(self) -> AtomFactory; fn factory(self) -> AtomFactory;
type Info: AtomDynfo; type Info: AtomDynfo;
fn info() -> Self::Info; fn info() -> Self::Info;
fn dynfo() -> Box<dyn AtomDynfo>; fn dynfo() -> Box<dyn AtomDynfo>;
} }
pub trait ToAtom { pub trait ToAtom {
fn to_atom_factory(self) -> AtomFactory; fn to_atom_factory(self) -> AtomFactory;
} }
impl<A: AtomicFeatures> ToAtom for A { impl<A: AtomicFeatures> ToAtom for A {
fn to_atom_factory(self) -> AtomFactory { self.factory() } fn to_atom_factory(self) -> AtomFactory { self.factory() }
} }
impl ToAtom for AtomFactory { impl ToAtom for AtomFactory {
fn to_atom_factory(self) -> AtomFactory { self } fn to_atom_factory(self) -> AtomFactory { self }
} }
pub trait AtomicFeaturesImpl<Variant: AtomicVariant> { pub trait AtomicFeaturesImpl<Variant: AtomicVariant> {
fn _factory(self) -> AtomFactory; fn _factory(self) -> AtomFactory;
type _Info: AtomDynfo; type _Info: AtomDynfo;
fn _info() -> Self::_Info; fn _info() -> Self::_Info;
} }
impl<A: Atomic + AtomicFeaturesImpl<A::Variant>> AtomicFeatures for A { impl<A: Atomic + AtomicFeaturesImpl<A::Variant>> AtomicFeatures for A {
fn factory(self) -> AtomFactory { self._factory() } fn factory(self) -> AtomFactory { self._factory() }
type Info = <Self as AtomicFeaturesImpl<A::Variant>>::_Info; type Info = <Self as AtomicFeaturesImpl<A::Variant>>::_Info;
fn info() -> Self::Info { Self::_info() } fn info() -> Self::Info { Self::_info() }
fn dynfo() -> Box<dyn AtomDynfo> { Box::new(Self::info()) } fn dynfo() -> Box<dyn AtomDynfo> { Box::new(Self::info()) }
} }
pub fn get_info<A: AtomCard>( pub fn get_info<A: AtomCard>(
sys: &(impl DynSystemCard + ?Sized), sys: &(impl DynSystemCard + ?Sized),
) -> (api::AtomId, Box<dyn AtomDynfo>) { ) -> (api::AtomId, Box<dyn AtomDynfo>) {
atom_info_for(sys, TypeId::of::<A>()).unwrap_or_else(|| { atom_info_for(sys, TypeId::of::<A>()).unwrap_or_else(|| {
panic!("Atom {} not associated with system {}", type_name::<A>(), sys.name()) panic!("Atom {} not associated with system {}", type_name::<A>(), sys.name())
}) })
} }
#[derive(Clone)] #[derive(Clone)]
pub struct ForeignAtom<'a> { pub struct ForeignAtom<'a> {
pub expr: Option<Arc<ExprHandle>>, pub expr: Option<Arc<ExprHandle>>,
pub _life: PhantomData<&'a ()>, pub _life: PhantomData<&'a ()>,
pub ctx: SysCtx, pub ctx: SysCtx,
pub atom: api::Atom, pub atom: api::Atom,
pub pos: Pos, pub pos: Pos,
} }
impl ForeignAtom<'_> { impl ForeignAtom<'_> {
pub fn oex_opt(self) -> Option<Expr> { pub fn oex_opt(self) -> Option<Expr> {
let (handle, pos) = (self.expr.as_ref()?.clone(), self.pos.clone()); let (handle, pos) = (self.expr.as_ref()?.clone(), self.pos.clone());
let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { _life: PhantomData, ..self }) }; let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { _life: PhantomData, ..self }) };
Some(Expr { handle: Some(handle), val: OnceLock::from(data) }) Some(Expr { handle: Some(handle), val: OnceLock::from(data) })
} }
} }
impl ForeignAtom<'static> { impl ForeignAtom<'static> {
pub fn oex(self) -> Expr { self.oex_opt().unwrap() } pub fn oex(self) -> Expr { self.oex_opt().unwrap() }
pub(crate) fn new(handle: Arc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self { pub(crate) fn new(handle: Arc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self {
ForeignAtom { _life: PhantomData, atom, ctx: handle.ctx.clone(), expr: Some(handle), pos } ForeignAtom { _life: PhantomData, atom, ctx: handle.ctx.clone(), expr: Some(handle), pos }
} }
pub fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> { pub fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
let rep = self.ctx.reqnot.request(api::Fwd( let rep = self.ctx.reqnot.request(api::Fwd(
self.atom.clone(), self.atom.clone(),
Sym::parse(M::NAME).unwrap().tok().to_api(), Sym::parse(M::NAME).unwrap().tok().to_api(),
enc_vec(&m) enc_vec(&m),
))?; ))?;
Some(M::Response::decode(&mut &rep[..])) Some(M::Response::decode(&mut &rep[..]))
} }
} }
impl fmt::Display for ForeignAtom<'_> { impl fmt::Display for ForeignAtom<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}::{:?}", if self.expr.is_some() { "Clause" } else { "Tok" }, self.atom) write!(f, "{}::{:?}", if self.expr.is_some() { "Clause" } else { "Tok" }, self.atom)
} }
} }
impl fmt::Debug for ForeignAtom<'_> { impl fmt::Debug for ForeignAtom<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ForeignAtom({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ForeignAtom({self})") }
} }
impl AtomRepr for ForeignAtom<'_> { impl AtomRepr for ForeignAtom<'_> {
type Ctx = SysCtx; type Ctx = SysCtx;
fn from_api(atom: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self { fn from_api(atom: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self {
Self { atom: atom.clone(), _life: PhantomData, ctx: ctx.clone(), expr: None, pos } Self { atom: atom.clone(), _life: PhantomData, ctx: ctx.clone(), expr: None, pos }
} }
fn to_api(&self) -> orchid_api::Atom { self.atom.clone() } fn to_api(&self) -> orchid_api::Atom { self.atom.clone() }
} }
pub struct NotTypAtom(pub Pos, pub Expr, pub Box<dyn AtomDynfo>); pub struct NotTypAtom(pub Pos, pub Expr, pub Box<dyn AtomDynfo>);
impl NotTypAtom { impl NotTypAtom {
pub fn mk_err(&self) -> OrcErr { pub fn mk_err(&self) -> OrcErr {
mk_err( mk_err(
intern!(str: "Not the expected type"), intern!(str: "Not the expected type"),
format!("This expression is not a {}", self.2.name()), format!("This expression is not a {}", self.2.name()),
[self.0.clone().into()], [self.0.clone().into()],
) )
} }
} }
pub trait AtomMethod: Request { pub trait AtomMethod: Request {
const NAME: &str; const NAME: &str;
} }
pub trait Supports<M: AtomMethod>: AtomCard { pub trait Supports<M: AtomMethod>: AtomCard {
fn handle(&self, ctx: SysCtx, req: M) -> <M as Request>::Response; fn handle(&self, ctx: SysCtx, req: M) -> <M as Request>::Response;
} }
trait_set! { trait_set! {
trait AtomReqCb<A> = Fn(&A, SysCtx, &mut dyn Read, &mut dyn Write) + Send + Sync trait AtomReqCb<A> = Fn(&A, SysCtx, &mut dyn Read, &mut dyn Write) + Send + Sync
} }
pub struct AtomReqHandler<A: AtomCard> { pub struct AtomReqHandler<A: AtomCard> {
key: Sym, key: Sym,
cb: Box<dyn AtomReqCb<A>>, cb: Box<dyn AtomReqCb<A>>,
} }
pub struct MethodSet<A: AtomCard> { pub struct MethodSet<A: AtomCard> {
handlers: Vec<AtomReqHandler<A>>, handlers: Vec<AtomReqHandler<A>>,
} }
impl<A: AtomCard> MethodSet<A> { impl<A: AtomCard> MethodSet<A> {
pub fn new() -> Self { Self{ handlers: vec![] } } pub fn new() -> Self { Self { handlers: vec![] } }
pub fn handle<M: AtomMethod>(mut self) -> Self where A: Supports<M> { pub fn handle<M: AtomMethod>(mut self) -> Self
self.handlers.push(AtomReqHandler { where A: Supports<M> {
key: Sym::parse(M::NAME).expect("AtomMethod::NAME cannoot be empty"), self.handlers.push(AtomReqHandler {
cb: Box::new(move | key: Sym::parse(M::NAME).expect("AtomMethod::NAME cannoot be empty"),
a: &A, cb: Box::new(move |a: &A, ctx: SysCtx, req: &mut dyn Read, rep: &mut dyn Write| {
ctx: SysCtx, Supports::<M>::handle(a, ctx, M::decode(req)).encode(rep);
req: &mut dyn Read, }),
rep: &mut dyn Write });
| { self
Supports::<M>::handle(a, ctx, M::decode(req)).encode(rep); }
})
});
self
}
pub(crate) fn dispatch( pub(crate) fn dispatch(
&self, atom: &A, ctx: SysCtx, key: Sym, req: &mut dyn Read, rep: &mut dyn Write &self,
) -> bool { atom: &A,
match self.handlers.iter().find(|h| h.key == key) { ctx: SysCtx,
None => false, key: Sym,
Some(handler) => { req: &mut dyn Read,
(handler.cb)(atom, ctx, req, rep); rep: &mut dyn Write,
true ) -> bool {
}, match self.handlers.iter().find(|h| h.key == key) {
} None => false,
} Some(handler) => {
(handler.cb)(atom, ctx, req, rep);
true
},
}
}
} }
impl<A: AtomCard> Default for MethodSet<A> { impl<A: AtomCard> Default for MethodSet<A> {
fn default() -> Self { fn default() -> Self { Self::new() }
Self::new()
}
} }
#[derive(Clone)] #[derive(Clone)]
pub struct TypAtom<'a, A: AtomicFeatures> { pub struct TypAtom<'a, A: AtomicFeatures> {
pub data: ForeignAtom<'a>, pub data: ForeignAtom<'a>,
pub value: A::Data, pub value: A::Data,
} }
impl<A: AtomicFeatures> TypAtom<'static, A> { impl<A: AtomicFeatures> TypAtom<'static, A> {
pub fn downcast(expr: Arc<ExprHandle>) -> Result<Self, NotTypAtom> { pub fn downcast(expr: Arc<ExprHandle>) -> Result<Self, NotTypAtom> {
match Expr::new(expr).foreign_atom() { match Expr::new(expr).foreign_atom() {
Err(oe) => Err(NotTypAtom(oe.get_data().pos.clone(), oe, Box::new(A::info()))), Err(oe) => Err(NotTypAtom(oe.get_data().pos.clone(), oe, Box::new(A::info()))),
Ok(atm) => match downcast_atom::<A>(atm) { Ok(atm) => match downcast_atom::<A>(atm) {
Err(fa) => Err(NotTypAtom(fa.pos.clone(), fa.oex(), Box::new(A::info()))), Err(fa) => Err(NotTypAtom(fa.pos.clone(), fa.oex(), Box::new(A::info()))),
Ok(tatom) => Ok(tatom), Ok(tatom) => Ok(tatom),
}, },
} }
} }
} }
impl<A: AtomicFeatures> TypAtom<'_, A> { impl<A: AtomicFeatures> TypAtom<'_, A> {
pub fn request<M: AtomMethod>(&self, req: M) -> M::Response where A: Supports<M> { pub fn request<M: AtomMethod>(&self, req: M) -> M::Response
M::Response::decode( where A: Supports<M> {
&mut &self.data.ctx.reqnot.request(api::Fwd( M::Response::decode(
self.data.atom.clone(), &mut &self
Sym::parse(M::NAME).unwrap().tok().to_api(), .data
enc_vec(&req) .ctx
)).unwrap()[..] .reqnot
) .request(api::Fwd(
} self.data.atom.clone(),
Sym::parse(M::NAME).unwrap().tok().to_api(),
enc_vec(&req),
))
.unwrap()[..],
)
}
} }
impl<A: AtomicFeatures> Deref for TypAtom<'_, A> { impl<A: AtomicFeatures> Deref for TypAtom<'_, A> {
type Target = A::Data; type Target = A::Data;
fn deref(&self) -> &Self::Target { &self.value } fn deref(&self) -> &Self::Target { &self.value }
} }
pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>, pub SysCtx); pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>, pub SysCtx);
pub trait AtomDynfo: Send + Sync + 'static { pub trait AtomDynfo: Send + Sync + 'static {
fn tid(&self) -> TypeId; fn tid(&self) -> TypeId;
fn name(&self) -> &'static str; fn name(&self) -> &'static str;
fn decode(&self, ctx: AtomCtx<'_>) -> Box<dyn Any>; fn decode(&self, ctx: AtomCtx<'_>) -> Box<dyn Any>;
fn call(&self, ctx: AtomCtx<'_>, arg: api::ExprTicket) -> Expr; fn call(&self, ctx: AtomCtx<'_>, arg: api::ExprTicket) -> Expr;
fn call_ref(&self, ctx: AtomCtx<'_>, arg: api::ExprTicket) -> Expr; fn call_ref(&self, ctx: AtomCtx<'_>, arg: api::ExprTicket) -> Expr;
fn print(&self, ctx: AtomCtx<'_>) -> String; fn print(&self, ctx: AtomCtx<'_>) -> String;
fn handle_req(&self, ctx: AtomCtx<'_>, key: Sym, req: &mut dyn Read, rep: &mut dyn Write) -> bool; fn handle_req(&self, ctx: AtomCtx<'_>, key: Sym, req: &mut dyn Read, rep: &mut dyn Write)
fn command(&self, ctx: AtomCtx<'_>) -> OrcRes<Option<Expr>>; -> bool;
fn serialize(&self, ctx: AtomCtx<'_>, write: &mut dyn Write) -> Option<Vec<api::ExprTicket>>; fn command(&self, ctx: AtomCtx<'_>) -> OrcRes<Option<Expr>>;
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> api::Atom; fn serialize(&self, ctx: AtomCtx<'_>, write: &mut dyn Write) -> Option<Vec<api::ExprTicket>>;
fn drop(&self, ctx: AtomCtx<'_>); fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> api::Atom;
fn drop(&self, ctx: AtomCtx<'_>);
} }
trait_set! { trait_set! {
pub trait AtomFactoryFn = FnOnce(SysCtx) -> api::Atom + DynClone + Send + Sync; pub trait AtomFactoryFn = FnOnce(SysCtx) -> api::Atom + DynClone + Send + Sync;
} }
pub struct AtomFactory(Box<dyn AtomFactoryFn>); pub struct AtomFactory(Box<dyn AtomFactoryFn>);
impl AtomFactory { impl AtomFactory {
pub fn new(f: impl FnOnce(SysCtx) -> api::Atom + Clone + Send + Sync + 'static) -> Self { pub fn new(f: impl FnOnce(SysCtx) -> api::Atom + Clone + Send + Sync + 'static) -> Self {
Self(Box::new(f)) Self(Box::new(f))
} }
pub fn build(self, ctx: SysCtx) -> api::Atom { (self.0)(ctx) } pub fn build(self, ctx: SysCtx) -> api::Atom { (self.0)(ctx) }
} }
impl Clone for AtomFactory { impl Clone for AtomFactory {
fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) } fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) }
} }
impl fmt::Debug for AtomFactory { impl fmt::Debug for AtomFactory {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "AtomFactory") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "AtomFactory") }
} }
impl fmt::Display for AtomFactory { impl fmt::Display for AtomFactory {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "AtomFactory") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "AtomFactory") }
} }
pub fn err_not_callable() -> OrcErr { pub fn err_not_callable() -> OrcErr {
mk_err(intern!(str: "This atom is not callable"), "Attempted to apply value as function", []) mk_err(intern!(str: "This atom is not callable"), "Attempted to apply value as function", [])
} }
pub fn err_not_command() -> OrcErr { pub fn err_not_command() -> OrcErr {
mk_err(intern!(str: "This atom is not a command"), "Settled on an inactionable value", []) mk_err(intern!(str: "This atom is not a command"), "Settled on an inactionable value", [])
} }

View File

@@ -12,8 +12,8 @@ use orchid_base::name::Sym;
use crate::api; use crate::api;
use crate::atom::{ use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet, AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
err_not_callable, err_not_command, get_info, err_not_callable, err_not_command, get_info,
}; };
use crate::expr::{Expr, ExprHandle, bot}; use crate::expr::{Expr, ExprHandle, bot};
use crate::system::SysCtx; use crate::system::SysCtx;
@@ -21,197 +21,197 @@ use crate::system::SysCtx;
pub struct OwnedVariant; pub struct OwnedVariant;
impl AtomicVariant for OwnedVariant {} impl AtomicVariant for OwnedVariant {}
impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A { impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A {
fn _factory(self) -> AtomFactory { fn _factory(self) -> AtomFactory {
AtomFactory::new(move |ctx| { AtomFactory::new(move |ctx| {
let rec = OBJ_STORE.add(Box::new(self)); let rec = OBJ_STORE.add(Box::new(self));
let (id, _) = get_info::<A>(ctx.cted.inst().card()); let (id, _) = get_info::<A>(ctx.cted.inst().card());
let mut data = enc_vec(&id); let mut data = enc_vec(&id);
rec.encode(&mut data); rec.encode(&mut data);
api::Atom { drop: Some(api::AtomId(rec.id())), data, owner: ctx.id } api::Atom { drop: Some(api::AtomId(rec.id())), data, owner: ctx.id }
}) })
} }
fn _info() -> Self::_Info { OwnedAtomDynfo(A::reg_reqs()) } fn _info() -> Self::_Info { OwnedAtomDynfo(A::reg_reqs()) }
type _Info = OwnedAtomDynfo<A>; type _Info = OwnedAtomDynfo<A>;
} }
fn with_atom<U>(id: api::AtomId, f: impl FnOnce(IdRecord<'_, Box<dyn DynOwnedAtom>>) -> U) -> U { fn with_atom<U>(id: api::AtomId, f: impl FnOnce(IdRecord<'_, Box<dyn DynOwnedAtom>>) -> U) -> U {
f(OBJ_STORE.get(id.0).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0))) f(OBJ_STORE.get(id.0).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0)))
} }
pub struct OwnedAtomDynfo<T: OwnedAtom>(MethodSet<T>); pub struct OwnedAtomDynfo<T: OwnedAtom>(MethodSet<T>);
impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> { impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
fn print(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> String { fn print(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> String {
with_atom(id.unwrap(), |a| a.dyn_print(ctx)) with_atom(id.unwrap(), |a| a.dyn_print(ctx))
} }
fn tid(&self) -> TypeId { TypeId::of::<T>() } fn tid(&self) -> TypeId { TypeId::of::<T>() }
fn name(&self) -> &'static str { type_name::<T>() } fn name(&self) -> &'static str { type_name::<T>() }
fn decode(&self, AtomCtx(data, ..): AtomCtx) -> Box<dyn Any> { fn decode(&self, AtomCtx(data, ..): AtomCtx) -> Box<dyn Any> {
Box::new(<T as AtomCard>::Data::decode(&mut &data[..])) Box::new(<T as AtomCard>::Data::decode(&mut &data[..]))
} }
fn call(&self, AtomCtx(_, id, ctx): AtomCtx, arg: api::ExprTicket) -> Expr { fn call(&self, AtomCtx(_, id, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
with_atom(id.unwrap(), |a| a.remove().dyn_call(ctx, arg)) with_atom(id.unwrap(), |a| a.remove().dyn_call(ctx, arg))
} }
fn call_ref(&self, AtomCtx(_, id, ctx): AtomCtx, arg: api::ExprTicket) -> Expr { fn call_ref(&self, AtomCtx(_, id, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
with_atom(id.unwrap(), |a| a.dyn_call_ref(ctx, arg)) with_atom(id.unwrap(), |a| a.dyn_call_ref(ctx, arg))
} }
fn handle_req( fn handle_req(
&self, &self,
AtomCtx(_, id, ctx): AtomCtx, AtomCtx(_, id, ctx): AtomCtx,
key: Sym, key: Sym,
req: &mut dyn Read, req: &mut dyn Read,
rep: &mut dyn Write, rep: &mut dyn Write,
) -> bool { ) -> bool {
with_atom(id.unwrap(), |a| { with_atom(id.unwrap(), |a| {
self.0.dispatch(a.as_any_ref().downcast_ref().unwrap(), ctx, key, req, rep) self.0.dispatch(a.as_any_ref().downcast_ref().unwrap(), ctx, key, req, rep)
}) })
} }
fn command(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> OrcRes<Option<Expr>> { fn command(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> OrcRes<Option<Expr>> {
with_atom(id.unwrap(), |a| a.remove().dyn_command(ctx)) with_atom(id.unwrap(), |a| a.remove().dyn_command(ctx))
} }
fn drop(&self, AtomCtx(_, id, ctx): AtomCtx) { fn drop(&self, AtomCtx(_, id, ctx): AtomCtx) {
with_atom(id.unwrap(), |a| a.remove().dyn_free(ctx)) with_atom(id.unwrap(), |a| a.remove().dyn_free(ctx))
} }
fn serialize( fn serialize(
&self, &self,
AtomCtx(_, id, ctx): AtomCtx<'_>, AtomCtx(_, id, ctx): AtomCtx<'_>,
write: &mut dyn Write, write: &mut dyn Write,
) -> Option<Vec<api::ExprTicket>> { ) -> Option<Vec<api::ExprTicket>> {
let id = id.unwrap(); let id = id.unwrap();
id.encode(write); id.encode(write);
with_atom(id, |a| a.dyn_serialize(ctx, write)) with_atom(id, |a| a.dyn_serialize(ctx, write))
.map(|v| v.into_iter().map(|t| t.handle.unwrap().tk).collect_vec()) .map(|v| v.into_iter().map(|t| t.handle.unwrap().tk).collect_vec())
} }
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> orchid_api::Atom { fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> orchid_api::Atom {
let refs = refs.iter().map(|tk| Expr::new(Arc::new(ExprHandle::from_args(ctx.clone(), *tk)))); let refs = refs.iter().map(|tk| Expr::new(Arc::new(ExprHandle::from_args(ctx.clone(), *tk))));
let obj = T::deserialize(DeserCtxImpl(data, &ctx), T::Refs::from_iter(refs)); let obj = T::deserialize(DeserCtxImpl(data, &ctx), T::Refs::from_iter(refs));
obj._factory().build(ctx) obj._factory().build(ctx)
} }
} }
pub trait DeserializeCtx: Sized { pub trait DeserializeCtx: Sized {
fn read<T: Decode>(&mut self) -> T; fn read<T: Decode>(&mut self) -> T;
fn is_empty(&self) -> bool; fn is_empty(&self) -> bool;
fn assert_empty(self) { assert!(self.is_empty(), "Bytes found after decoding") } fn assert_empty(self) { assert!(self.is_empty(), "Bytes found after decoding") }
fn decode<T: Decode>(mut self) -> T { fn decode<T: Decode>(mut self) -> T {
let t = self.read(); let t = self.read();
self.assert_empty(); self.assert_empty();
t t
} }
fn sys(&self) -> SysCtx; fn sys(&self) -> SysCtx;
} }
struct DeserCtxImpl<'a>(&'a [u8], &'a SysCtx); struct DeserCtxImpl<'a>(&'a [u8], &'a SysCtx);
impl DeserializeCtx for DeserCtxImpl<'_> { impl DeserializeCtx for DeserCtxImpl<'_> {
fn read<T: Decode>(&mut self) -> T { T::decode(&mut self.0) } fn read<T: Decode>(&mut self) -> T { T::decode(&mut self.0) }
fn is_empty(&self) -> bool { self.0.is_empty() } fn is_empty(&self) -> bool { self.0.is_empty() }
fn sys(&self) -> SysCtx { self.1.clone() } fn sys(&self) -> SysCtx { self.1.clone() }
} }
pub trait RefSet { pub trait RefSet {
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self; fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self;
fn to_vec(self) -> Vec<Expr>; fn to_vec(self) -> Vec<Expr>;
} }
static E_NON_SER: &str = "Never is a stand-in refset for non-serializable atoms"; static E_NON_SER: &str = "Never is a stand-in refset for non-serializable atoms";
impl RefSet for Never { impl RefSet for Never {
fn from_iter<I>(_: I) -> Self { panic!("{E_NON_SER}") } fn from_iter<I>(_: I) -> Self { panic!("{E_NON_SER}") }
fn to_vec(self) -> Vec<Expr> { panic!("{E_NON_SER}") } fn to_vec(self) -> Vec<Expr> { panic!("{E_NON_SER}") }
} }
impl RefSet for () { impl RefSet for () {
fn to_vec(self) -> Vec<Expr> { Vec::new() } fn to_vec(self) -> Vec<Expr> { Vec::new() }
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self { fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self {
assert_eq!(refs.len(), 0, "Expected no refs") assert_eq!(refs.len(), 0, "Expected no refs")
} }
} }
impl RefSet for Vec<Expr> { impl RefSet for Vec<Expr> {
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self { refs.collect_vec() } fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self { refs.collect_vec() }
fn to_vec(self) -> Vec<Expr> { self } fn to_vec(self) -> Vec<Expr> { self }
} }
impl<const N: usize> RefSet for [Expr; N] { impl<const N: usize> RefSet for [Expr; N] {
fn to_vec(self) -> Vec<Expr> { self.into_iter().collect_vec() } fn to_vec(self) -> Vec<Expr> { self.into_iter().collect_vec() }
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self { fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self {
assert_eq!(refs.len(), N, "Wrong number of refs provided"); assert_eq!(refs.len(), N, "Wrong number of refs provided");
refs.collect_vec().try_into().unwrap_or_else(|_: Vec<_>| unreachable!()) refs.collect_vec().try_into().unwrap_or_else(|_: Vec<_>| unreachable!())
} }
} }
/// Atoms that have a [Drop] /// Atoms that have a [Drop]
pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Send + Sync + Any + Clone + 'static { pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Send + Sync + Any + Clone + 'static {
/// If serializable, the collection that best stores subexpression references /// If serializable, the collection that best stores subexpression references
/// for this atom. /// for this atom.
/// ///
/// - `()` for no subexppressions, /// - `()` for no subexppressions,
/// - `[Expr; N]` for a static number of subexpressions /// - `[Expr; N]` for a static number of subexpressions
/// - `Vec<Expr>` for a variable number of subexpressions /// - `Vec<Expr>` for a variable number of subexpressions
/// - `Never` if not serializable /// - `Never` if not serializable
/// ///
/// If this isn't `Never`, you must override the default, panicking /// If this isn't `Never`, you must override the default, panicking
/// `serialize` and `deserialize` implementation /// `serialize` and `deserialize` implementation
type Refs: RefSet; type Refs: RefSet;
fn val(&self) -> Cow<'_, Self::Data>; fn val(&self) -> Cow<'_, Self::Data>;
#[allow(unused_variables)] #[allow(unused_variables)]
fn call_ref(&self, arg: ExprHandle) -> Expr { bot([err_not_callable()]) } fn call_ref(&self, arg: ExprHandle) -> Expr { bot([err_not_callable()]) }
fn call(self, arg: ExprHandle) -> Expr { fn call(self, arg: ExprHandle) -> Expr {
let ctx = arg.get_ctx(); let ctx = arg.get_ctx();
let gcl = self.call_ref(arg); let gcl = self.call_ref(arg);
self.free(ctx); self.free(ctx);
gcl gcl
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn command(self, ctx: SysCtx) -> OrcRes<Option<Expr>> { Err(err_not_command().into()) } fn command(self, ctx: SysCtx) -> OrcRes<Option<Expr>> { Err(err_not_command().into()) }
#[allow(unused_variables)] #[allow(unused_variables)]
fn free(self, ctx: SysCtx) {} fn free(self, ctx: SysCtx) {}
#[allow(unused_variables)] #[allow(unused_variables)]
fn print(&self, ctx: SysCtx) -> String { format!("OwnedAtom({})", type_name::<Self>()) } fn print(&self, ctx: SysCtx) -> String { format!("OwnedAtom({})", type_name::<Self>()) }
#[allow(unused_variables)] #[allow(unused_variables)]
fn serialize(&self, ctx: SysCtx, write: &mut (impl Write + ?Sized)) -> Self::Refs { fn serialize(&self, ctx: SysCtx, write: &mut (impl Write + ?Sized)) -> Self::Refs {
assert!( assert!(
TypeId::of::<Self::Refs>() != TypeId::of::<Never>(), TypeId::of::<Self::Refs>() != TypeId::of::<Never>(),
"The extension scaffold is broken, this function should never be called on Never Refs" "The extension scaffold is broken, this function should never be called on Never Refs"
); );
panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>()) panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>())
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn deserialize(ctx: impl DeserializeCtx, refs: Self::Refs) -> Self { fn deserialize(ctx: impl DeserializeCtx, refs: Self::Refs) -> Self {
assert!( assert!(
TypeId::of::<Self::Refs>() != TypeId::of::<Never>(), TypeId::of::<Self::Refs>() != TypeId::of::<Never>(),
"The extension scaffold is broken, this function should never be called on Never Refs" "The extension scaffold is broken, this function should never be called on Never Refs"
); );
panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>()) panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>())
} }
} }
pub trait DynOwnedAtom: Send + Sync + 'static { pub trait DynOwnedAtom: Send + Sync + 'static {
fn atom_tid(&self) -> TypeId; fn atom_tid(&self) -> TypeId;
fn as_any_ref(&self) -> &dyn Any; fn as_any_ref(&self) -> &dyn Any;
fn encode(&self, buffer: &mut dyn Write); fn encode(&self, buffer: &mut dyn Write);
fn dyn_call_ref(&self, ctx: SysCtx, arg: api::ExprTicket) -> Expr; fn dyn_call_ref(&self, ctx: SysCtx, arg: api::ExprTicket) -> Expr;
fn dyn_call(self: Box<Self>, ctx: SysCtx, arg: api::ExprTicket) -> Expr; fn dyn_call(self: Box<Self>, ctx: SysCtx, arg: api::ExprTicket) -> Expr;
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> OrcRes<Option<Expr>>; fn dyn_command(self: Box<Self>, ctx: SysCtx) -> OrcRes<Option<Expr>>;
fn dyn_free(self: Box<Self>, ctx: SysCtx); fn dyn_free(self: Box<Self>, ctx: SysCtx);
fn dyn_print(&self, ctx: SysCtx) -> String; fn dyn_print(&self, ctx: SysCtx) -> String;
fn dyn_serialize(&self, ctx: SysCtx, sink: &mut dyn Write) -> Option<Vec<Expr>>; fn dyn_serialize(&self, ctx: SysCtx, sink: &mut dyn Write) -> Option<Vec<Expr>>;
} }
impl<T: OwnedAtom> DynOwnedAtom for T { impl<T: OwnedAtom> DynOwnedAtom for T {
fn atom_tid(&self) -> TypeId { TypeId::of::<T>() } fn atom_tid(&self) -> TypeId { TypeId::of::<T>() }
fn as_any_ref(&self) -> &dyn Any { self } fn as_any_ref(&self) -> &dyn Any { self }
fn encode(&self, buffer: &mut dyn Write) { self.val().as_ref().encode(buffer) } fn encode(&self, buffer: &mut dyn Write) { self.val().as_ref().encode(buffer) }
fn dyn_call_ref(&self, ctx: SysCtx, arg: api::ExprTicket) -> Expr { fn dyn_call_ref(&self, ctx: SysCtx, arg: api::ExprTicket) -> Expr {
self.call_ref(ExprHandle::from_args(ctx, arg)) self.call_ref(ExprHandle::from_args(ctx, arg))
} }
fn dyn_call(self: Box<Self>, ctx: SysCtx, arg: api::ExprTicket) -> Expr { fn dyn_call(self: Box<Self>, ctx: SysCtx, arg: api::ExprTicket) -> Expr {
self.call(ExprHandle::from_args(ctx, arg)) self.call(ExprHandle::from_args(ctx, arg))
} }
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> OrcRes<Option<Expr>> { self.command(ctx) } fn dyn_command(self: Box<Self>, ctx: SysCtx) -> OrcRes<Option<Expr>> { self.command(ctx) }
fn dyn_free(self: Box<Self>, ctx: SysCtx) { self.free(ctx) } fn dyn_free(self: Box<Self>, ctx: SysCtx) { self.free(ctx) }
fn dyn_print(&self, ctx: SysCtx) -> String { self.print(ctx) } fn dyn_print(&self, ctx: SysCtx) -> String { self.print(ctx) }
fn dyn_serialize(&self, ctx: SysCtx, sink: &mut dyn Write) -> Option<Vec<Expr>> { fn dyn_serialize(&self, ctx: SysCtx, sink: &mut dyn Write) -> Option<Vec<Expr>> {
(TypeId::of::<Never>() != TypeId::of::<<Self as OwnedAtom>::Refs>()) (TypeId::of::<Never>() != TypeId::of::<<Self as OwnedAtom>::Refs>())
.then(|| self.serialize(ctx, sink).to_vec()) .then(|| self.serialize(ctx, sink).to_vec())
} }
} }
pub(crate) static OBJ_STORE: IdStore<Box<dyn DynOwnedAtom>> = IdStore::new(); pub(crate) static OBJ_STORE: IdStore<Box<dyn DynOwnedAtom>> = IdStore::new();

View File

@@ -7,8 +7,8 @@ use orchid_base::name::Sym;
use crate::api; use crate::api;
use crate::atom::{ use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet, AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
err_not_callable, err_not_command, get_info, err_not_callable, err_not_command, get_info,
}; };
use crate::expr::{Expr, ExprHandle, bot}; use crate::expr::{Expr, ExprHandle, bot};
use crate::system::SysCtx; use crate::system::SysCtx;
@@ -16,65 +16,65 @@ use crate::system::SysCtx;
pub struct ThinVariant; pub struct ThinVariant;
impl AtomicVariant for ThinVariant {} impl AtomicVariant for ThinVariant {}
impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A { impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A {
fn _factory(self) -> AtomFactory { fn _factory(self) -> AtomFactory {
AtomFactory::new(move |ctx| { AtomFactory::new(move |ctx| {
let (id, _) = get_info::<A>(ctx.cted.inst().card()); let (id, _) = get_info::<A>(ctx.cted.inst().card());
let mut buf = enc_vec(&id); let mut buf = enc_vec(&id);
self.encode(&mut buf); self.encode(&mut buf);
api::Atom { drop: None, data: buf, owner: ctx.id } api::Atom { drop: None, data: buf, owner: ctx.id }
}) })
} }
fn _info() -> Self::_Info { ThinAtomDynfo(Self::reg_reqs()) } fn _info() -> Self::_Info { ThinAtomDynfo(Self::reg_reqs()) }
type _Info = ThinAtomDynfo<Self>; type _Info = ThinAtomDynfo<Self>;
} }
pub struct ThinAtomDynfo<T: ThinAtom>(MethodSet<T>); pub struct ThinAtomDynfo<T: ThinAtom>(MethodSet<T>);
impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> { impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
fn print(&self, AtomCtx(buf, _, ctx): AtomCtx<'_>) -> String { fn print(&self, AtomCtx(buf, _, ctx): AtomCtx<'_>) -> String {
T::decode(&mut &buf[..]).print(ctx) T::decode(&mut &buf[..]).print(ctx)
} }
fn tid(&self) -> TypeId { TypeId::of::<T>() } fn tid(&self) -> TypeId { TypeId::of::<T>() }
fn name(&self) -> &'static str { type_name::<T>() } fn name(&self) -> &'static str { type_name::<T>() }
fn decode(&self, AtomCtx(buf, ..): AtomCtx) -> Box<dyn Any> { Box::new(T::decode(&mut &buf[..])) } fn decode(&self, AtomCtx(buf, ..): AtomCtx) -> Box<dyn Any> { Box::new(T::decode(&mut &buf[..])) }
fn call(&self, AtomCtx(buf, _, ctx): AtomCtx, arg: api::ExprTicket) -> Expr { fn call(&self, AtomCtx(buf, _, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
T::decode(&mut &buf[..]).call(ExprHandle::from_args(ctx, arg)) T::decode(&mut &buf[..]).call(ExprHandle::from_args(ctx, arg))
} }
fn call_ref(&self, AtomCtx(buf, _, ctx): AtomCtx, arg: api::ExprTicket) -> Expr { fn call_ref(&self, AtomCtx(buf, _, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
T::decode(&mut &buf[..]).call(ExprHandle::from_args(ctx, arg)) T::decode(&mut &buf[..]).call(ExprHandle::from_args(ctx, arg))
} }
fn handle_req( fn handle_req(
&self, &self,
AtomCtx(buf, _, sys): AtomCtx, AtomCtx(buf, _, sys): AtomCtx,
key: Sym, key: Sym,
req: &mut dyn std::io::Read, req: &mut dyn std::io::Read,
rep: &mut dyn Write, rep: &mut dyn Write,
) -> bool { ) -> bool {
self.0.dispatch(&T::decode(&mut &buf[..]), sys, key, req, rep) self.0.dispatch(&T::decode(&mut &buf[..]), sys, key, req, rep)
} }
fn command(&self, AtomCtx(buf, _, ctx): AtomCtx<'_>) -> OrcRes<Option<Expr>> { fn command(&self, AtomCtx(buf, _, ctx): AtomCtx<'_>) -> OrcRes<Option<Expr>> {
T::decode(&mut &buf[..]).command(ctx) T::decode(&mut &buf[..]).command(ctx)
} }
fn serialize(&self, actx: AtomCtx<'_>, write: &mut dyn Write) -> Option<Vec<api::ExprTicket>> { fn serialize(&self, actx: AtomCtx<'_>, write: &mut dyn Write) -> Option<Vec<api::ExprTicket>> {
T::decode(&mut &actx.0[..]).encode(write); T::decode(&mut &actx.0[..]).encode(write);
Some(Vec::new()) Some(Vec::new())
} }
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> api::Atom { fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> api::Atom {
assert!(refs.is_empty(), "Refs found when deserializing thin atom"); assert!(refs.is_empty(), "Refs found when deserializing thin atom");
T::decode(&mut &data[..])._factory().build(ctx) T::decode(&mut &data[..])._factory().build(ctx)
} }
fn drop(&self, AtomCtx(buf, _, ctx): AtomCtx) { fn drop(&self, AtomCtx(buf, _, ctx): AtomCtx) {
let string_self = T::decode(&mut &buf[..]).print(ctx.clone()); let string_self = T::decode(&mut &buf[..]).print(ctx.clone());
writeln!(ctx.logger, "Received drop signal for non-drop atom {string_self:?}"); writeln!(ctx.logger, "Received drop signal for non-drop atom {string_self:?}");
} }
} }
pub trait ThinAtom: pub trait ThinAtom:
AtomCard<Data = Self> + Atomic<Variant = ThinVariant> + Coding + Send + Sync + 'static AtomCard<Data = Self> + Atomic<Variant = ThinVariant> + Coding + Send + Sync + 'static
{ {
#[allow(unused_variables)] #[allow(unused_variables)]
fn call(&self, arg: ExprHandle) -> Expr { bot([err_not_callable()]) } fn call(&self, arg: ExprHandle) -> Expr { bot([err_not_callable()]) }
#[allow(unused_variables)] #[allow(unused_variables)]
fn command(&self, ctx: SysCtx) -> OrcRes<Option<Expr>> { Err(err_not_command().into()) } fn command(&self, ctx: SysCtx) -> OrcRes<Option<Expr>> { Err(err_not_command().into()) }
#[allow(unused_variables)] #[allow(unused_variables)]
fn print(&self, ctx: SysCtx) -> String { format!("ThinAtom({})", type_name::<Self>()) } fn print(&self, ctx: SysCtx) -> String { format!("ThinAtom({})", type_name::<Self>()) }
} }

View File

@@ -1,58 +1,58 @@
use orchid_base::error::{mk_err, OrcErr, OrcRes}; use orchid_base::error::{OrcErr, OrcRes, mk_err};
use orchid_base::intern; use orchid_base::intern;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use crate::atom::{AtomicFeatures, ToAtom, TypAtom}; use crate::atom::{AtomicFeatures, ToAtom, TypAtom};
use crate::expr::{atom, bot, Expr}; use crate::expr::{Expr, atom, bot};
use crate::system::downcast_atom; use crate::system::downcast_atom;
pub trait TryFromExpr: Sized { pub trait TryFromExpr: Sized {
fn try_from_expr(expr: Expr) -> OrcRes<Self>; fn try_from_expr(expr: Expr) -> OrcRes<Self>;
} }
impl TryFromExpr for Expr { impl TryFromExpr for Expr {
fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr) } fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr) }
} }
impl<T: TryFromExpr, U: TryFromExpr> TryFromExpr for (T, U) { impl<T: TryFromExpr, U: TryFromExpr> TryFromExpr for (T, U) {
fn try_from_expr(expr: Expr) -> OrcRes<Self> { fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Ok((T::try_from_expr(expr.clone())?, U::try_from_expr(expr)?)) Ok((T::try_from_expr(expr.clone())?, U::try_from_expr(expr)?))
} }
} }
fn err_not_atom(pos: Pos) -> OrcErr { fn err_not_atom(pos: Pos) -> OrcErr {
mk_err(intern!(str: "Expected an atom"), "This expression is not an atom", [pos.into()]) mk_err(intern!(str: "Expected an atom"), "This expression is not an atom", [pos.into()])
} }
fn err_type(pos: Pos) -> OrcErr { fn err_type(pos: Pos) -> OrcErr {
mk_err(intern!(str: "Type error"), "The atom is a different type than expected", [pos.into()]) mk_err(intern!(str: "Type error"), "The atom is a different type than expected", [pos.into()])
} }
impl<A: AtomicFeatures> TryFromExpr for TypAtom<'_, A> { impl<A: AtomicFeatures> TryFromExpr for TypAtom<'_, A> {
fn try_from_expr(expr: Expr) -> OrcRes<Self> { fn try_from_expr(expr: Expr) -> OrcRes<Self> {
(expr.foreign_atom()) (expr.foreign_atom())
.map_err(|ex| err_not_atom(ex.pos.clone()).into()) .map_err(|ex| err_not_atom(ex.pos.clone()).into())
.and_then(|f| downcast_atom(f).map_err(|f| err_type(f.pos).into())) .and_then(|f| downcast_atom(f).map_err(|f| err_type(f.pos).into()))
} }
} }
pub trait ToExpr { pub trait ToExpr {
fn to_expr(self) -> Expr; fn to_expr(self) -> Expr;
} }
impl ToExpr for Expr { impl ToExpr for Expr {
fn to_expr(self) -> Expr { self } fn to_expr(self) -> Expr { self }
} }
impl<T: ToExpr> ToExpr for OrcRes<T> { impl<T: ToExpr> ToExpr for OrcRes<T> {
fn to_expr(self) -> Expr { fn to_expr(self) -> Expr {
match self { match self {
Err(e) => bot(e), Err(e) => bot(e),
Ok(t) => t.to_expr(), Ok(t) => t.to_expr(),
} }
} }
} }
impl<A: ToAtom> ToExpr for A { impl<A: ToAtom> ToExpr for A {
fn to_expr(self) -> Expr { atom(self) } fn to_expr(self) -> Expr { atom(self) }
} }

View File

@@ -6,10 +6,10 @@ use std::{mem, process, thread};
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use orchid_api_traits::{enc_vec, Decode, Encode}; use orchid_api_traits::{Decode, Encode, enc_vec};
use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter}; use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter};
use orchid_base::clone; use orchid_base::clone;
use orchid_base::interner::{init_replica, sweep_replica, Tok}; use orchid_base::interner::{Tok, init_replica, sweep_replica};
use orchid_base::logging::Logger; use orchid_base::logging::Logger;
use orchid_base::macros::{mtreev_from_api, mtreev_to_api}; use orchid_base::macros::{mtreev_from_api, mtreev_to_api};
use orchid_base::name::{PathSlice, Sym}; use orchid_base::name::{PathSlice, Sym};
@@ -22,283 +22,283 @@ use crate::api;
use crate::atom::{AtomCtx, AtomDynfo}; use crate::atom::{AtomCtx, AtomDynfo};
use crate::atom_owned::OBJ_STORE; use crate::atom_owned::OBJ_STORE;
use crate::fs::VirtFS; use crate::fs::VirtFS;
use crate::lexer::{err_cascade, err_not_applicable, LexContext}; use crate::lexer::{LexContext, err_cascade, err_not_applicable};
use crate::macros::{apply_rule, RuleCtx}; use crate::macros::{RuleCtx, apply_rule};
use crate::msg::{recv_parent_msg, send_parent_msg}; use crate::msg::{recv_parent_msg, send_parent_msg};
use crate::system::{atom_by_idx, SysCtx}; use crate::system::{SysCtx, atom_by_idx};
use crate::system_ctor::{CtedObj, DynSystemCtor}; use crate::system_ctor::{CtedObj, DynSystemCtor};
use crate::tree::{do_extra, GenTok, GenTokTree, LazyMemberFactory, TIACtxImpl}; use crate::tree::{GenTok, GenTokTree, LazyMemberFactory, TIACtxImpl, do_extra};
pub type ExtReq = RequestHandle<api::ExtMsgSet>; pub type ExtReq = RequestHandle<api::ExtMsgSet>;
pub type ExtReqNot = ReqNot<api::ExtMsgSet>; pub type ExtReqNot = ReqNot<api::ExtMsgSet>;
pub struct ExtensionData { pub struct ExtensionData {
pub name: &'static str, pub name: &'static str,
pub systems: &'static [&'static dyn DynSystemCtor], pub systems: &'static [&'static dyn DynSystemCtor],
} }
impl ExtensionData { impl ExtensionData {
pub fn new(name: &'static str, systems: &'static [&'static dyn DynSystemCtor]) -> Self { pub fn new(name: &'static str, systems: &'static [&'static dyn DynSystemCtor]) -> Self {
Self { name, systems } Self { name, systems }
} }
pub fn main(self) { extension_main(self) } pub fn main(self) { extension_main(self) }
} }
pub enum MemberRecord { pub enum MemberRecord {
Gen(Sym, LazyMemberFactory), Gen(Sym, LazyMemberFactory),
Res, Res,
} }
pub struct SystemRecord { pub struct SystemRecord {
cted: CtedObj, cted: CtedObj,
vfses: HashMap<api::VfsId, &'static dyn VirtFS>, vfses: HashMap<api::VfsId, &'static dyn VirtFS>,
declfs: api::EagerVfs, declfs: api::EagerVfs,
lazy_members: HashMap<api::TreeId, MemberRecord>, lazy_members: HashMap<api::TreeId, MemberRecord>,
} }
pub fn with_atom_record<T>( pub fn with_atom_record<T>(
get_sys_ctx: &impl Fn(api::SysId, ReqNot<api::ExtMsgSet>) -> SysCtx, get_sys_ctx: &impl Fn(api::SysId, ReqNot<api::ExtMsgSet>) -> SysCtx,
reqnot: ReqNot<api::ExtMsgSet>, reqnot: ReqNot<api::ExtMsgSet>,
atom: &api::Atom, atom: &api::Atom,
cb: impl FnOnce(Box<dyn AtomDynfo>, SysCtx, api::AtomId, &[u8]) -> T, cb: impl FnOnce(Box<dyn AtomDynfo>, SysCtx, api::AtomId, &[u8]) -> T,
) -> T { ) -> T {
let mut data = &atom.data[..]; let mut data = &atom.data[..];
let ctx = get_sys_ctx(atom.owner, reqnot); let ctx = get_sys_ctx(atom.owner, reqnot);
let inst = ctx.cted.inst(); let inst = ctx.cted.inst();
let id = api::AtomId::decode(&mut data); let id = api::AtomId::decode(&mut data);
let atom_record = atom_by_idx(inst.card(), id).expect("Atom ID reserved"); let atom_record = atom_by_idx(inst.card(), id).expect("Atom ID reserved");
cb(atom_record, ctx, id, data) cb(atom_record, ctx, id, data)
} }
pub fn extension_main(data: ExtensionData) { pub fn extension_main(data: ExtensionData) {
if thread::Builder::new() if thread::Builder::new()
.name(format!("ext-main:{}", data.name)) .name(format!("ext-main:{}", data.name))
.spawn(|| extension_main_logic(data)) .spawn(|| extension_main_logic(data))
.unwrap() .unwrap()
.join() .join()
.is_err() .is_err()
{ {
process::exit(-1) process::exit(-1)
} }
} }
fn extension_main_logic(data: ExtensionData) { fn extension_main_logic(data: ExtensionData) {
let api::HostHeader { log_strategy } = api::HostHeader::decode(&mut std::io::stdin().lock()); let api::HostHeader { log_strategy } = api::HostHeader::decode(&mut std::io::stdin().lock());
let mut buf = Vec::new(); let mut buf = Vec::new();
let decls = (data.systems.iter().enumerate()) let decls = (data.systems.iter().enumerate())
.map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys)) .map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys))
.map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap()))) .map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap())))
.collect_vec(); .collect_vec();
let systems = Arc::new(Mutex::new(HashMap::<api::SysId, SystemRecord>::new())); let systems = Arc::new(Mutex::new(HashMap::<api::SysId, SystemRecord>::new()));
api::ExtensionHeader { name: data.name.to_string(), systems: decls.clone() }.encode(&mut buf); api::ExtensionHeader { name: data.name.to_string(), systems: decls.clone() }.encode(&mut buf);
std::io::stdout().write_all(&buf).unwrap(); std::io::stdout().write_all(&buf).unwrap();
std::io::stdout().flush().unwrap(); std::io::stdout().flush().unwrap();
let exiting = Arc::new(AtomicBool::new(false)); let exiting = Arc::new(AtomicBool::new(false));
let logger = Arc::new(Logger::new(log_strategy)); let logger = Arc::new(Logger::new(log_strategy));
let mk_ctx = clone!(logger, systems; move |id: api::SysId, reqnot: ReqNot<api::ExtMsgSet>| { let mk_ctx = clone!(logger, systems; move |id: api::SysId, reqnot: ReqNot<api::ExtMsgSet>| {
let cted = systems.lock().unwrap()[&id].cted.clone(); let cted = systems.lock().unwrap()[&id].cted.clone();
SysCtx { id, cted, logger: logger.clone(), reqnot } SysCtx { id, cted, logger: logger.clone(), reqnot }
}); });
let rn = ReqNot::<api::ExtMsgSet>::new( let rn = ReqNot::<api::ExtMsgSet>::new(
clone!(logger; move |a, _| { clone!(logger; move |a, _| {
logger.log_buf("Upsending", a); logger.log_buf("Upsending", a);
send_parent_msg(a).unwrap() send_parent_msg(a).unwrap()
}), }),
clone!(systems, exiting, mk_ctx; move |n, reqnot| match n { clone!(systems, exiting, mk_ctx; move |n, reqnot| match n {
api::HostExtNotif::Exit => exiting.store(true, Ordering::Relaxed), api::HostExtNotif::Exit => exiting.store(true, Ordering::Relaxed),
api::HostExtNotif::SystemDrop(api::SystemDrop(sys_id)) => api::HostExtNotif::SystemDrop(api::SystemDrop(sys_id)) =>
mem::drop(systems.lock().unwrap().remove(&sys_id)), mem::drop(systems.lock().unwrap().remove(&sys_id)),
api::HostExtNotif::AtomDrop(api::AtomDrop(sys_id, atom)) => api::HostExtNotif::AtomDrop(api::AtomDrop(sys_id, atom)) =>
OBJ_STORE.get(atom.0).unwrap().remove().dyn_free(mk_ctx(sys_id, reqnot)), OBJ_STORE.get(atom.0).unwrap().remove().dyn_free(mk_ctx(sys_id, reqnot)),
}), }),
clone!(systems, logger; move |hand, req| match req { clone!(systems, logger; move |hand, req| match req {
api::HostExtReq::Ping(ping@api::Ping) => hand.handle(&ping, &()), api::HostExtReq::Ping(ping@api::Ping) => hand.handle(&ping, &()),
api::HostExtReq::Sweep(sweep@api::Sweep) => hand.handle(&sweep, &sweep_replica()), api::HostExtReq::Sweep(sweep@api::Sweep) => hand.handle(&sweep, &sweep_replica()),
api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => { api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => {
let i = decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system).unwrap().0; let i = decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system).unwrap().0;
let cted = data.systems[i].new_system(&new_sys); let cted = data.systems[i].new_system(&new_sys);
let mut vfses = HashMap::new(); let mut vfses = HashMap::new();
let lex_filter = cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| { let lex_filter = cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
let lxcf = mk_char_filter(lx.char_filter().iter().cloned()); let lxcf = mk_char_filter(lx.char_filter().iter().cloned());
char_filter_union(&cf, &lxcf) char_filter_union(&cf, &lxcf)
}); });
let mut lazy_mems = HashMap::new(); let mut lazy_mems = HashMap::new();
let ctx = SysCtx{ let ctx = SysCtx{
cted: cted.clone(), cted: cted.clone(),
id: new_sys.id, id: new_sys.id,
logger: logger.clone(), logger: logger.clone(),
reqnot: hand.reqnot() reqnot: hand.reqnot()
}; };
let mut tia_ctx = TIACtxImpl{ let mut tia_ctx = TIACtxImpl{
lazy: &mut lazy_mems, lazy: &mut lazy_mems,
sys: ctx.clone(), sys: ctx.clone(),
basepath: &[], basepath: &[],
path: Substack::Bottom, path: Substack::Bottom,
}; };
let const_root = (cted.inst().dyn_env().into_iter()) let const_root = (cted.inst().dyn_env().into_iter())
.map(|(k, v)| (k.to_api(), v.into_api(&mut tia_ctx))) .map(|(k, v)| (k.to_api(), v.into_api(&mut tia_ctx)))
.collect(); .collect();
systems.lock().unwrap().insert(new_sys.id, SystemRecord { systems.lock().unwrap().insert(new_sys.id, SystemRecord {
declfs: cted.inst().dyn_vfs().to_api_rec(&mut vfses), declfs: cted.inst().dyn_vfs().to_api_rec(&mut vfses),
vfses, vfses,
cted, cted,
lazy_members: lazy_mems lazy_members: lazy_mems
}); });
hand.handle(&new_sys, &api::SystemInst { hand.handle(&new_sys, &api::SystemInst {
lex_filter, lex_filter,
const_root, const_root,
line_types: vec![] line_types: vec![]
}) })
} }
api::HostExtReq::GetMember(get_tree@api::GetMember(sys_id, tree_id)) => { api::HostExtReq::GetMember(get_tree@api::GetMember(sys_id, tree_id)) => {
let mut systems_g = systems.lock().unwrap(); let mut systems_g = systems.lock().unwrap();
let sys = systems_g.get_mut(&sys_id).expect("System not found"); let sys = systems_g.get_mut(&sys_id).expect("System not found");
let lazy = &mut sys.lazy_members; let lazy = &mut sys.lazy_members;
let (path, cb) = match lazy.insert(tree_id, MemberRecord::Res) { let (path, cb) = match lazy.insert(tree_id, MemberRecord::Res) {
None => panic!("Tree for ID not found"), None => panic!("Tree for ID not found"),
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"), Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
Some(MemberRecord::Gen(path, cb)) => (path, cb), Some(MemberRecord::Gen(path, cb)) => (path, cb),
}; };
let tree = cb.build(path.clone()); let tree = cb.build(path.clone());
hand.handle(&get_tree, &tree.into_api(&mut TIACtxImpl{ hand.handle(&get_tree, &tree.into_api(&mut TIACtxImpl{
sys: SysCtx::new(sys_id, &sys.cted, &logger, hand.reqnot()), sys: SysCtx::new(sys_id, &sys.cted, &logger, hand.reqnot()),
path: Substack::Bottom, path: Substack::Bottom,
basepath: &path, basepath: &path,
lazy, lazy,
})) }))
} }
api::HostExtReq::VfsReq(api::VfsReq::GetVfs(get_vfs@api::GetVfs(sys_id))) => { api::HostExtReq::VfsReq(api::VfsReq::GetVfs(get_vfs@api::GetVfs(sys_id))) => {
let systems_g = systems.lock().unwrap(); let systems_g = systems.lock().unwrap();
hand.handle(&get_vfs, &systems_g[&sys_id].declfs) hand.handle(&get_vfs, &systems_g[&sys_id].declfs)
} }
api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => { api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
let api::SysFwded(sys_id, payload) = fwd; let api::SysFwded(sys_id, payload) = fwd;
let ctx = mk_ctx(sys_id, hand.reqnot()); let ctx = mk_ctx(sys_id, hand.reqnot());
let sys = ctx.cted.inst(); let sys = ctx.cted.inst();
sys.dyn_request(hand, payload) sys.dyn_request(hand, payload)
} }
api::HostExtReq::VfsReq(api::VfsReq::VfsRead(vfs_read)) => { api::HostExtReq::VfsReq(api::VfsReq::VfsRead(vfs_read)) => {
let api::VfsRead(sys_id, vfs_id, path) = &vfs_read; let api::VfsRead(sys_id, vfs_id, path) = &vfs_read;
let systems_g = systems.lock().unwrap(); let systems_g = systems.lock().unwrap();
let path = path.iter().map(|t| Tok::from_api(*t)).collect_vec(); let path = path.iter().map(|t| Tok::from_api(*t)).collect_vec();
hand.handle(&vfs_read, &systems_g[sys_id].vfses[vfs_id].load(PathSlice::new(&path))) hand.handle(&vfs_read, &systems_g[sys_id].vfses[vfs_id].load(PathSlice::new(&path)))
} }
api::HostExtReq::LexExpr(lex @ api::LexExpr{ sys, text, pos, id }) => { api::HostExtReq::LexExpr(lex @ api::LexExpr{ sys, text, pos, id }) => {
let systems_g = systems.lock().unwrap(); let systems_g = systems.lock().unwrap();
let lexers = systems_g[&sys].cted.inst().dyn_lexers(); let lexers = systems_g[&sys].cted.inst().dyn_lexers();
mem::drop(systems_g); mem::drop(systems_g);
let text = Tok::from_api(text); let text = Tok::from_api(text);
let ctx = LexContext { sys, id, pos, reqnot: hand.reqnot(), text: &text }; let ctx = LexContext { sys, id, pos, reqnot: hand.reqnot(), text: &text };
let trigger_char = text.chars().nth(pos as usize).unwrap(); let trigger_char = text.chars().nth(pos as usize).unwrap();
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) { for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) {
match lx.lex(&text[pos as usize..], &ctx) { match lx.lex(&text[pos as usize..], &ctx) {
Err(e) if e.any(|e| *e == err_not_applicable()) => continue, Err(e) if e.any(|e| *e == err_not_applicable()) => continue,
Err(e) => { Err(e) => {
let eopt = e.keep_only(|e| *e != err_cascade()).map(|e| Err(e.to_api())); let eopt = e.keep_only(|e| *e != err_cascade()).map(|e| Err(e.to_api()));
return hand.handle(&lex, &eopt) return hand.handle(&lex, &eopt)
}, },
Ok((s, expr)) => { Ok((s, expr)) => {
let ctx = mk_ctx(sys, hand.reqnot()); let ctx = mk_ctx(sys, hand.reqnot());
let expr = expr.to_api(&mut |f, r| do_extra(f, r, ctx.clone())); let expr = expr.to_api(&mut |f, r| do_extra(f, r, ctx.clone()));
let pos = (text.len() - s.len()) as u32; let pos = (text.len() - s.len()) as u32;
return hand.handle(&lex, &Some(Ok(api::LexedExpr{ pos, expr }))) return hand.handle(&lex, &Some(Ok(api::LexedExpr{ pos, expr })))
} }
} }
} }
writeln!(logger, "Got notified about n/a character '{trigger_char}'"); writeln!(logger, "Got notified about n/a character '{trigger_char}'");
hand.handle(&lex, &None) hand.handle(&lex, &None)
}, },
api::HostExtReq::ParseLine(pline) => { api::HostExtReq::ParseLine(pline) => {
let api::ParseLine{ exported, comments, sys, line } = &pline; let api::ParseLine{ exported, comments, sys, line } = &pline;
let mut ctx = mk_ctx(*sys, hand.reqnot()); let mut ctx = mk_ctx(*sys, hand.reqnot());
let parsers = ctx.cted.inst().dyn_parsers(); let parsers = ctx.cted.inst().dyn_parsers();
let comments = comments.iter().map(Comment::from_api).collect(); let comments = comments.iter().map(Comment::from_api).collect();
let line: Vec<GenTokTree> = ttv_from_api(line, &mut ctx); let line: Vec<GenTokTree> = ttv_from_api(line, &mut ctx);
let snip = Snippet::new(line.first().expect("Empty line"), &line); let snip = Snippet::new(line.first().expect("Empty line"), &line);
let (head, tail) = snip.pop_front().unwrap(); let (head, tail) = snip.pop_front().unwrap();
let name = if let GenTok::Name(n) = &head.tok { n } else { panic!("No line head") }; let name = if let GenTok::Name(n) = &head.tok { n } else { panic!("No line head") };
let parser = parsers.iter().find(|p| p.line_head() == **name).expect("No parser candidate"); let parser = parsers.iter().find(|p| p.line_head() == **name).expect("No parser candidate");
let o_line = match parser.parse(*exported, comments, tail) { let o_line = match parser.parse(*exported, comments, tail) {
Err(e) => Err(e.to_api()), Err(e) => Err(e.to_api()),
Ok(t) => Ok(ttv_to_api(t, &mut |f, range| { Ok(t) => Ok(ttv_to_api(t, &mut |f, range| {
api::TokenTree{ range, token: api::Token::Atom(f.clone().build(ctx.clone())) } api::TokenTree{ range, token: api::Token::Atom(f.clone().build(ctx.clone())) }
})), })),
}; };
hand.handle(&pline, &o_line) hand.handle(&pline, &o_line)
} }
api::HostExtReq::AtomReq(atom_req) => { api::HostExtReq::AtomReq(atom_req) => {
let atom = atom_req.get_atom(); let atom = atom_req.get_atom();
with_atom_record(&mk_ctx, hand.reqnot(), atom, |nfo, ctx, id, buf| { with_atom_record(&mk_ctx, hand.reqnot(), atom, |nfo, ctx, id, buf| {
let actx = AtomCtx(buf, atom.drop, ctx.clone()); let actx = AtomCtx(buf, atom.drop, ctx.clone());
match &atom_req { match &atom_req {
api::AtomReq::SerializeAtom(ser) => { api::AtomReq::SerializeAtom(ser) => {
let mut buf = enc_vec(&id); let mut buf = enc_vec(&id);
let refs_opt = nfo.serialize(actx, &mut buf); let refs_opt = nfo.serialize(actx, &mut buf);
hand.handle(ser, &refs_opt.map(|refs| (buf, refs))) hand.handle(ser, &refs_opt.map(|refs| (buf, refs)))
} }
api::AtomReq::AtomPrint(print@api::AtomPrint(_)) => api::AtomReq::AtomPrint(print@api::AtomPrint(_)) =>
hand.handle(print, &nfo.print(actx)), hand.handle(print, &nfo.print(actx)),
api::AtomReq::Fwded(fwded) => { api::AtomReq::Fwded(fwded) => {
let api::Fwded(_, key, payload) = &fwded; let api::Fwded(_, key, payload) = &fwded;
let mut reply = Vec::new(); let mut reply = Vec::new();
let some = nfo.handle_req(actx, Sym::from_api(*key), &mut &payload[..], &mut reply); let some = nfo.handle_req(actx, Sym::from_api(*key), &mut &payload[..], &mut reply);
hand.handle(fwded, &some.then_some(reply)) hand.handle(fwded, &some.then_some(reply))
} }
api::AtomReq::CallRef(call@api::CallRef(_, arg)) => { api::AtomReq::CallRef(call@api::CallRef(_, arg)) => {
let ret = nfo.call_ref(actx, *arg); let ret = nfo.call_ref(actx, *arg);
hand.handle(call, &ret.api_return(ctx.clone(), &mut |h| hand.defer_drop(h))) hand.handle(call, &ret.api_return(ctx.clone(), &mut |h| hand.defer_drop(h)))
}, },
api::AtomReq::FinalCall(call@api::FinalCall(_, arg)) => { api::AtomReq::FinalCall(call@api::FinalCall(_, arg)) => {
let ret = nfo.call(actx, *arg); let ret = nfo.call(actx, *arg);
hand.handle(call, &ret.api_return(ctx.clone(), &mut |h| hand.defer_drop(h))) hand.handle(call, &ret.api_return(ctx.clone(), &mut |h| hand.defer_drop(h)))
} }
api::AtomReq::Command(cmd@api::Command(_)) => { api::AtomReq::Command(cmd@api::Command(_)) => {
hand.handle(cmd, &match nfo.command(actx) { hand.handle(cmd, &match nfo.command(actx) {
Err(e) => Err(e.to_api()), Err(e) => Err(e.to_api()),
Ok(opt) => Ok(match opt { Ok(opt) => Ok(match opt {
None => api::NextStep::Halt, None => api::NextStep::Halt,
Some(cont) => api::NextStep::Continue( Some(cont) => api::NextStep::Continue(
cont.api_return(ctx.clone(), &mut |h| hand.defer_drop(h)) cont.api_return(ctx.clone(), &mut |h| hand.defer_drop(h))
), ),
}) })
}) })
} }
} }
}) })
}, },
api::HostExtReq::DeserAtom(deser) => { api::HostExtReq::DeserAtom(deser) => {
let api::DeserAtom(sys, buf, refs) = &deser; let api::DeserAtom(sys, buf, refs) = &deser;
let mut read = &mut &buf[..]; let mut read = &mut &buf[..];
let ctx = mk_ctx(*sys, hand.reqnot()); let ctx = mk_ctx(*sys, hand.reqnot());
let id = api::AtomId::decode(&mut read); let id = api::AtomId::decode(&mut read);
let inst = ctx.cted.inst(); let inst = ctx.cted.inst();
let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID"); let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID");
hand.handle(&deser, &nfo.deserialize(ctx.clone(), read, refs)) hand.handle(&deser, &nfo.deserialize(ctx.clone(), read, refs))
}, },
orchid_api::HostExtReq::ApplyMacro(am) => { orchid_api::HostExtReq::ApplyMacro(am) => {
let tok = hand.will_handle_as(&am); let tok = hand.will_handle_as(&am);
let sys_ctx = mk_ctx(am.sys, hand.reqnot()); let sys_ctx = mk_ctx(am.sys, hand.reqnot());
let ctx = RuleCtx { let ctx = RuleCtx {
args: (am.params.into_iter()) args: (am.params.into_iter())
.map(|(k, v)| ( .map(|(k, v)| (
Tok::from_api(k), Tok::from_api(k),
mtreev_from_api(&v, &mut |_| panic!("No atom in macro prompt!")) mtreev_from_api(&v, &mut |_| panic!("No atom in macro prompt!"))
)) ))
.collect(), .collect(),
run_id: am.run_id, run_id: am.run_id,
sys: sys_ctx.clone(), sys: sys_ctx.clone(),
}; };
hand.handle_as(tok, &match apply_rule(am.id, ctx) { hand.handle_as(tok, &match apply_rule(am.id, ctx) {
Err(e) => e.keep_only(|e| *e != err_cascade()).map(|e| Err(e.to_api())), Err(e) => e.keep_only(|e| *e != err_cascade()).map(|e| Err(e.to_api())),
Ok(t) => Some(Ok(mtreev_to_api(&t, &mut |a| { Ok(t) => Some(Ok(mtreev_to_api(&t, &mut |a| {
api::MacroToken::Atom(a.clone().build(sys_ctx.clone())) api::MacroToken::Atom(a.clone().build(sys_ctx.clone()))
}))), }))),
}) })
} }
}), }),
); );
init_replica(rn.clone().map()); init_replica(rn.clone().map());
while !exiting.load(Ordering::Relaxed) { while !exiting.load(Ordering::Relaxed) {
let rcvd = recv_parent_msg().unwrap(); let rcvd = recv_parent_msg().unwrap();
rn.receive(&rcvd) rn.receive(&rcvd)
} }
} }

View File

@@ -1,9 +1,8 @@
use std::fmt;
use std::ops::Deref; use std::ops::Deref;
use std::sync::{Arc, OnceLock}; use std::sync::{Arc, OnceLock};
use std::{backtrace, fmt};
use derive_destructure::destructure; use derive_destructure::destructure;
use orchid_base::clone;
use orchid_base::error::{OrcErr, OrcErrv}; use orchid_base::error::{OrcErr, OrcErrv};
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_base::location::Pos; use orchid_base::location::Pos;
@@ -17,122 +16,122 @@ use crate::system::SysCtx;
#[derive(destructure)] #[derive(destructure)]
pub struct ExprHandle { pub struct ExprHandle {
pub tk: api::ExprTicket, pub tk: api::ExprTicket,
pub ctx: SysCtx, pub ctx: SysCtx,
} }
impl ExprHandle { impl ExprHandle {
pub(crate) fn from_args(ctx: SysCtx, tk: api::ExprTicket) -> Self { Self { ctx, tk } } pub(crate) fn from_args(ctx: SysCtx, tk: api::ExprTicket) -> Self { Self { ctx, tk } }
pub fn get_ctx(&self) -> SysCtx { self.ctx.clone() } pub fn get_ctx(&self) -> SysCtx { self.ctx.clone() }
} }
impl fmt::Debug for ExprHandle { impl fmt::Debug for ExprHandle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "ExprHandle({})", self.tk.0) write!(f, "ExprHandle({})", self.tk.0)
} }
} }
impl Clone for ExprHandle { impl Clone for ExprHandle {
fn clone(&self) -> Self { fn clone(&self) -> Self {
self.ctx.reqnot.notify(api::Acquire(self.ctx.id, self.tk)); self.ctx.reqnot.notify(api::Acquire(self.ctx.id, self.tk));
Self { ctx: self.ctx.clone(), tk: self.tk } Self { ctx: self.ctx.clone(), tk: self.tk }
} }
} }
impl Drop for ExprHandle { impl Drop for ExprHandle {
fn drop(&mut self) { self.ctx.reqnot.notify(api::Release(self.ctx.id, self.tk)) } fn drop(&mut self) { self.ctx.reqnot.notify(api::Release(self.ctx.id, self.tk)) }
} }
#[derive(Clone, Debug, destructure)] #[derive(Clone, Debug, destructure)]
pub struct Expr { pub struct Expr {
pub handle: Option<Arc<ExprHandle>>, pub handle: Option<Arc<ExprHandle>>,
pub val: OnceLock<ExprData>, pub val: OnceLock<ExprData>,
} }
impl Expr { impl Expr {
pub fn new(hand: Arc<ExprHandle>) -> Self { Self { handle: Some(hand), val: OnceLock::new() } } pub fn new(hand: Arc<ExprHandle>) -> Self { Self { handle: Some(hand), val: OnceLock::new() } }
pub fn from_data(val: ExprData) -> Self { Self { handle: None, val: OnceLock::from(val) } } pub fn from_data(val: ExprData) -> Self { Self { handle: None, val: OnceLock::from(val) } }
pub fn get_data(&self) -> &ExprData { pub fn get_data(&self) -> &ExprData {
self.val.get_or_init(|| { self.val.get_or_init(|| {
let handle = self.handle.as_ref().expect("Either the value or the handle must be set"); let handle = self.handle.as_ref().expect("Either the value or the handle must be set");
let details = handle.ctx.reqnot.request(api::Inspect { target: handle.tk }); let details = handle.ctx.reqnot.request(api::Inspect { target: handle.tk });
let pos = Pos::from_api(&details.location); let pos = Pos::from_api(&details.location);
let kind = match details.kind { let kind = match details.kind {
api::InspectedKind::Atom(a) => api::InspectedKind::Atom(a) =>
ExprKind::Atom(ForeignAtom::new(handle.clone(), a, pos.clone())), ExprKind::Atom(ForeignAtom::new(handle.clone(), a, pos.clone())),
api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b)), api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b)),
api::InspectedKind::Opaque => ExprKind::Opaque, api::InspectedKind::Opaque => ExprKind::Opaque,
}; };
ExprData { pos, kind } ExprData { pos, kind }
}) })
} }
pub fn foreign_atom(self) -> Result<ForeignAtom<'static>, Self> { pub fn foreign_atom(self) -> Result<ForeignAtom<'static>, Self> {
match (self.get_data(), &self.handle) { match (self.get_data(), &self.handle) {
(ExprData { kind: ExprKind::Atom(atom), .. }, Some(_)) => Ok(atom.clone()), (ExprData { kind: ExprKind::Atom(atom), .. }, Some(_)) => Ok(atom.clone()),
_ => Err(self), _ => Err(self),
} }
} }
pub fn api_return( pub fn api_return(
self, self,
ctx: SysCtx, ctx: SysCtx,
do_slot: &mut impl FnMut(Arc<ExprHandle>), do_slot: &mut impl FnMut(Arc<ExprHandle>),
) -> api::Expression { ) -> api::Expression {
if let Some(h) = self.handle { if let Some(h) = self.handle {
do_slot(h.clone()); do_slot(h.clone());
api::Expression { location: api::Location::SlotTarget, kind: api::ExpressionKind::Slot(h.tk) } api::Expression { location: api::Location::SlotTarget, kind: api::ExpressionKind::Slot(h.tk) }
} else { } else {
self.val.into_inner().expect("Either value or handle must be set").api_return(ctx, do_slot) self.val.into_inner().expect("Either value or handle must be set").api_return(ctx, do_slot)
} }
} }
pub fn handle(&self) -> Option<Arc<ExprHandle>> { self.handle.clone() } pub fn handle(&self) -> Option<Arc<ExprHandle>> { self.handle.clone() }
} }
impl Deref for Expr { impl Deref for Expr {
type Target = ExprData; type Target = ExprData;
fn deref(&self) -> &Self::Target { self.get_data() } fn deref(&self) -> &Self::Target { self.get_data() }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct ExprData { pub struct ExprData {
pub pos: Pos, pub pos: Pos,
pub kind: ExprKind, pub kind: ExprKind,
} }
impl ExprData { impl ExprData {
pub fn api_return( pub fn api_return(
self, self,
ctx: SysCtx, ctx: SysCtx,
do_slot: &mut impl FnMut(Arc<ExprHandle>), do_slot: &mut impl FnMut(Arc<ExprHandle>),
) -> api::Expression { ) -> api::Expression {
api::Expression { location: self.pos.to_api(), kind: self.kind.api_return(ctx, do_slot) } api::Expression { location: self.pos.to_api(), kind: self.kind.api_return(ctx, do_slot) }
} }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum ExprKind { pub enum ExprKind {
Call(Box<Expr>, Box<Expr>), Call(Box<Expr>, Box<Expr>),
Lambda(u64, Box<Expr>), Lambda(u64, Box<Expr>),
Arg(u64), Arg(u64),
Seq(Box<Expr>, Box<Expr>), Seq(Box<Expr>, Box<Expr>),
Const(Tok<Vec<Tok<String>>>), Const(Tok<Vec<Tok<String>>>),
NewAtom(AtomFactory), NewAtom(AtomFactory),
Atom(ForeignAtom<'static>), Atom(ForeignAtom<'static>),
Bottom(OrcErrv), Bottom(OrcErrv),
Opaque, Opaque,
} }
impl ExprKind { impl ExprKind {
pub fn api_return( pub fn api_return(
self, self,
ctx: SysCtx, ctx: SysCtx,
do_slot: &mut impl FnMut(Arc<ExprHandle>), do_slot: &mut impl FnMut(Arc<ExprHandle>),
) -> api::ExpressionKind { ) -> api::ExpressionKind {
use api::ExpressionKind as K; use api::ExpressionKind as K;
match self { match self {
Self::Call(f, x) => Self::Call(f, x) =>
K::Call(Box::new(f.api_return(ctx.clone(), do_slot)), Box::new(x.api_return(ctx, do_slot))), K::Call(Box::new(f.api_return(ctx.clone(), do_slot)), Box::new(x.api_return(ctx, do_slot))),
Self::Seq(a, b) => Self::Seq(a, b) =>
K::Seq(Box::new(a.api_return(ctx.clone(), do_slot)), Box::new(b.api_return(ctx, do_slot))), K::Seq(Box::new(a.api_return(ctx.clone(), do_slot)), Box::new(b.api_return(ctx, do_slot))),
Self::Lambda(arg, body) => K::Lambda(arg, Box::new(body.api_return(ctx, do_slot))), Self::Lambda(arg, body) => K::Lambda(arg, Box::new(body.api_return(ctx, do_slot))),
Self::Arg(arg) => K::Arg(arg), Self::Arg(arg) => K::Arg(arg),
Self::Const(name) => K::Const(name.to_api()), Self::Const(name) => K::Const(name.to_api()),
Self::Bottom(err) => K::Bottom(err.to_api()), Self::Bottom(err) => K::Bottom(err.to_api()),
Self::NewAtom(fac) => K::NewAtom(fac.clone().build(ctx)), Self::NewAtom(fac) => K::NewAtom(fac.clone().build(ctx)),
kind @ (Self::Atom(_) | Self::Opaque) => panic!("{kind:?} should have a token"), kind @ (Self::Atom(_) | Self::Opaque) => panic!("{kind:?} should have a token"),
} }
} }
} }
fn inherit(kind: ExprKind) -> Expr { Expr::from_data(ExprData { pos: Pos::Inherit, kind }) } fn inherit(kind: ExprKind) -> Expr { Expr::from_data(ExprData { pos: Pos::Inherit, kind }) }
@@ -140,35 +139,35 @@ pub fn sym_ref(path: Tok<Vec<Tok<String>>>) -> Expr { inherit(ExprKind::Const(pa
pub fn atom<A: ToAtom>(atom: A) -> Expr { inherit(ExprKind::NewAtom(atom.to_atom_factory())) } pub fn atom<A: ToAtom>(atom: A) -> Expr { inherit(ExprKind::NewAtom(atom.to_atom_factory())) }
pub fn seq(ops: impl IntoIterator<Item = Expr>) -> Expr { pub fn seq(ops: impl IntoIterator<Item = Expr>) -> Expr {
fn recur(mut ops: impl Iterator<Item = Expr>) -> Option<Expr> { fn recur(mut ops: impl Iterator<Item = Expr>) -> Option<Expr> {
let op = ops.next()?; let op = ops.next()?;
Some(match recur(ops) { Some(match recur(ops) {
None => op, None => op,
Some(rec) => inherit(ExprKind::Seq(Box::new(op), Box::new(rec))), Some(rec) => inherit(ExprKind::Seq(Box::new(op), Box::new(rec))),
}) })
} }
recur(ops.into_iter()).expect("Empty list provided to seq!") recur(ops.into_iter()).expect("Empty list provided to seq!")
} }
pub fn arg(n: u64) -> Expr { inherit(ExprKind::Arg(n)) } pub fn arg(n: u64) -> Expr { inherit(ExprKind::Arg(n)) }
pub fn lambda(n: u64, b: impl IntoIterator<Item = Expr>) -> Expr { pub fn lambda(n: u64, b: impl IntoIterator<Item = Expr>) -> Expr {
inherit(ExprKind::Lambda(n, Box::new(call(b)))) inherit(ExprKind::Lambda(n, Box::new(call(b))))
} }
pub fn call(v: impl IntoIterator<Item = Expr>) -> Expr { pub fn call(v: impl IntoIterator<Item = Expr>) -> Expr {
v.into_iter() v.into_iter()
.reduce(|f, x| inherit(ExprKind::Call(Box::new(f), Box::new(x)))) .reduce(|f, x| inherit(ExprKind::Call(Box::new(f), Box::new(x))))
.expect("Empty call expression") .expect("Empty call expression")
} }
pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> Expr { pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> Expr {
inherit(ExprKind::Bottom(OrcErrv::new(ev).unwrap())) inherit(ExprKind::Bottom(OrcErrv::new(ev).unwrap()))
} }
pub fn with<I: TryFromExpr, O: ToExpr>( pub fn with<I: TryFromExpr, O: ToExpr>(
expr: Expr, expr: Expr,
cont: impl Fn(I) -> O + Clone + Send + Sync + 'static, cont: impl Fn(I) -> O + Clone + Send + Sync + 'static,
) -> Expr { ) -> Expr {
call([lambda(0, [seq([arg(0), call([Lambda::new(cont).to_expr(), arg(0)])])]), expr]) call([lambda(0, [seq([arg(0), call([Lambda::new(cont).to_expr(), arg(0)])])]), expr])
} }

View File

@@ -7,25 +7,25 @@ use orchid_base::name::PathSlice;
use crate::api; use crate::api;
pub trait VirtFS: Send + Sync + 'static { pub trait VirtFS: Send + Sync + 'static {
fn load(&self, path: &PathSlice) -> api::OrcResult<api::Loaded>; fn load(&self, path: &PathSlice) -> api::OrcResult<api::Loaded>;
} }
pub enum DeclFs { pub enum DeclFs {
Lazy(&'static dyn VirtFS), Lazy(&'static dyn VirtFS),
Mod(&'static [(&'static str, DeclFs)]), Mod(&'static [(&'static str, DeclFs)]),
} }
impl DeclFs { impl DeclFs {
pub fn to_api_rec(&self, vfses: &mut HashMap<api::VfsId, &'static dyn VirtFS>) -> api::EagerVfs { pub fn to_api_rec(&self, vfses: &mut HashMap<api::VfsId, &'static dyn VirtFS>) -> api::EagerVfs {
match self { match self {
DeclFs::Lazy(fs) => { DeclFs::Lazy(fs) => {
let vfsc: u16 = vfses.len().try_into().expect("too many vfses (more than u16::MAX)"); let vfsc: u16 = vfses.len().try_into().expect("too many vfses (more than u16::MAX)");
let id = api::VfsId(NonZero::new(vfsc + 1).unwrap()); let id = api::VfsId(NonZero::new(vfsc + 1).unwrap());
vfses.insert(id, *fs); vfses.insert(id, *fs);
api::EagerVfs::Lazy(id) api::EagerVfs::Lazy(id)
}, },
DeclFs::Mod(children) => api::EagerVfs::Eager( DeclFs::Mod(children) => api::EagerVfs::Eager(
children.iter().map(|(k, v)| (intern(*k).to_api(), v.to_api_rec(vfses))).collect(), children.iter().map(|(k, v)| (intern(*k).to_api(), v.to_api_rec(vfses))).collect(),
), ),
} }
} }
} }

View File

@@ -19,16 +19,16 @@ use crate::expr::{Expr, ExprHandle};
use crate::system::SysCtx; use crate::system::SysCtx;
trait_set! { trait_set! {
trait FunCB = Fn(Vec<Expr>) -> OrcRes<Expr> + Send + Sync + 'static; trait FunCB = Fn(Vec<Expr>) -> OrcRes<Expr> + Send + Sync + 'static;
} }
pub trait ExprFunc<I, O>: Clone + Send + Sync + 'static { pub trait ExprFunc<I, O>: Clone + Send + Sync + 'static {
const ARITY: u8; const ARITY: u8;
fn apply(&self, v: Vec<Expr>) -> OrcRes<Expr>; fn apply(&self, v: Vec<Expr>) -> OrcRes<Expr>;
} }
lazy_static! { lazy_static! {
static ref FUNS: Mutex<HashMap<Sym, (u8, Arc<dyn FunCB>)>> = Mutex::default(); static ref FUNS: Mutex<HashMap<Sym, (u8, Arc<dyn FunCB>)>> = Mutex::default();
} }
/// An Atom representing a partially applied named native function. These /// An Atom representing a partially applied named native function. These
@@ -38,95 +38,96 @@ lazy_static! {
/// See [Lambda] for the non-serializable variant /// See [Lambda] for the non-serializable variant
#[derive(Clone)] #[derive(Clone)]
pub(crate) struct Fun { pub(crate) struct Fun {
path: Sym, path: Sym,
args: Vec<Expr>, args: Vec<Expr>,
arity: u8, arity: u8,
fun: Arc<dyn FunCB>, fun: Arc<dyn FunCB>,
} }
impl Fun { impl Fun {
pub fn new<I, O, F: ExprFunc<I, O>>(path: Sym, f: F) -> Self { pub fn new<I, O, F: ExprFunc<I, O>>(path: Sym, f: F) -> Self {
let mut fung = FUNS.lock().unwrap(); let mut fung = FUNS.lock().unwrap();
let fun = if let Some(x) = fung.get(&path) { let fun = if let Some(x) = fung.get(&path) {
x.1.clone() x.1.clone()
} else { } else {
let fun = Arc::new(move |v| f.apply(v)); let fun = Arc::new(move |v| f.apply(v));
fung.insert(path.clone(), (F::ARITY, fun.clone())); fung.insert(path.clone(), (F::ARITY, fun.clone()));
fun fun
}; };
Self { args: vec![], arity: F::ARITY, path, fun } Self { args: vec![], arity: F::ARITY, path, fun }
} }
} }
impl Atomic for Fun { impl Atomic for Fun {
type Data = (); type Data = ();
type Variant = OwnedVariant; type Variant = OwnedVariant;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() } fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
} }
impl OwnedAtom for Fun { impl OwnedAtom for Fun {
type Refs = Vec<Expr>; type Refs = Vec<Expr>;
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
fn call_ref(&self, arg: ExprHandle) -> Expr { fn call_ref(&self, arg: ExprHandle) -> Expr {
let new_args = self.args.iter().cloned().chain([Expr::new(Arc::new(arg))]).collect_vec(); let new_args = self.args.iter().cloned().chain([Expr::new(Arc::new(arg))]).collect_vec();
if new_args.len() == self.arity.into() { if new_args.len() == self.arity.into() {
(self.fun)(new_args).to_expr() (self.fun)(new_args).to_expr()
} else { } else {
Self { args: new_args, arity: self.arity, fun: self.fun.clone(), path: self.path.clone() } Self { args: new_args, arity: self.arity, fun: self.fun.clone(), path: self.path.clone() }
.to_expr() .to_expr()
} }
} }
fn call(self, arg: ExprHandle) -> Expr { self.call_ref(arg) } fn call(self, arg: ExprHandle) -> Expr { self.call_ref(arg) }
fn serialize(&self, _: SysCtx, sink: &mut (impl io::Write + ?Sized)) -> Self::Refs { fn serialize(&self, _: SysCtx, sink: &mut (impl io::Write + ?Sized)) -> Self::Refs {
self.path.encode(sink); self.path.encode(sink);
self.args.clone() self.args.clone()
} }
fn deserialize(ctx: impl DeserializeCtx, args: Self::Refs) -> Self { fn deserialize(ctx: impl DeserializeCtx, args: Self::Refs) -> Self {
let path = Sym::new(ctx.decode::<Vec<Tok<String>>>()).unwrap(); let path = Sym::new(ctx.decode::<Vec<Tok<String>>>()).unwrap();
let (arity, fun) = FUNS.lock().unwrap().get(&path).unwrap().clone(); let (arity, fun) = FUNS.lock().unwrap().get(&path).unwrap().clone();
Self { args, arity, path, fun } Self { args, arity, path, fun }
} }
} }
/// An Atom representing a partially applied native lambda. These are not serializable. /// An Atom representing a partially applied native lambda. These are not
/// serializable.
/// ///
/// See [Fun] for the serializable variant /// See [Fun] for the serializable variant
#[derive(Clone)] #[derive(Clone)]
pub struct Lambda { pub struct Lambda {
args: Vec<Expr>, args: Vec<Expr>,
arity: u8, arity: u8,
fun: Arc<dyn FunCB>, fun: Arc<dyn FunCB>,
} }
impl Lambda { impl Lambda {
pub fn new<I, O, F: ExprFunc<I, O>>(f: F) -> Self { pub fn new<I, O, F: ExprFunc<I, O>>(f: F) -> Self {
let fun = Arc::new(move |v| f.apply(v)); let fun = Arc::new(move |v| f.apply(v));
Self { args: vec![], arity: F::ARITY, fun } Self { args: vec![], arity: F::ARITY, fun }
} }
} }
impl Atomic for Lambda { impl Atomic for Lambda {
type Data = (); type Data = ();
type Variant = OwnedVariant; type Variant = OwnedVariant;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() } fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
} }
impl OwnedAtom for Lambda { impl OwnedAtom for Lambda {
type Refs = Never; type Refs = Never;
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
fn call_ref(&self, arg: ExprHandle) -> Expr { fn call_ref(&self, arg: ExprHandle) -> Expr {
let new_args = self.args.iter().cloned().chain([Expr::new(Arc::new(arg))]).collect_vec(); let new_args = self.args.iter().cloned().chain([Expr::new(Arc::new(arg))]).collect_vec();
if new_args.len() == self.arity.into() { if new_args.len() == self.arity.into() {
(self.fun)(new_args).to_expr() (self.fun)(new_args).to_expr()
} else { } else {
Self { args: new_args, arity: self.arity, fun: self.fun.clone() }.to_expr() Self { args: new_args, arity: self.arity, fun: self.fun.clone() }.to_expr()
} }
} }
fn call(self, arg: ExprHandle) -> Expr { self.call_ref(arg) } fn call(self, arg: ExprHandle) -> Expr { self.call_ref(arg) }
} }
mod expr_func_derives { mod expr_func_derives {
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use super::ExprFunc; use super::ExprFunc;
use crate::conv::{ToExpr, TryFromExpr}; use crate::conv::{ToExpr, TryFromExpr};
use crate::func_atom::Expr; use crate::func_atom::Expr;
macro_rules! expr_func_derive { macro_rules! expr_func_derive {
($arity: tt, $($t:ident),*) => { ($arity: tt, $($t:ident),*) => {
paste::paste!{ paste::paste!{
impl< impl<
@@ -144,18 +145,18 @@ mod expr_func_derives {
} }
}; };
} }
expr_func_derive!(1, A); expr_func_derive!(1, A);
expr_func_derive!(2, A, B); expr_func_derive!(2, A, B);
expr_func_derive!(3, A, B, C); expr_func_derive!(3, A, B, C);
expr_func_derive!(4, A, B, C, D); expr_func_derive!(4, A, B, C, D);
expr_func_derive!(5, A, B, C, D, E); expr_func_derive!(5, A, B, C, D, E);
expr_func_derive!(6, A, B, C, D, E, F); expr_func_derive!(6, A, B, C, D, E, F);
expr_func_derive!(7, A, B, C, D, E, F, G); expr_func_derive!(7, A, B, C, D, E, F, G);
expr_func_derive!(8, A, B, C, D, E, F, G, H); expr_func_derive!(8, A, B, C, D, E, F, G, H);
expr_func_derive!(9, A, B, C, D, E, F, G, H, I); expr_func_derive!(9, A, B, C, D, E, F, G, H, I);
expr_func_derive!(10, A, B, C, D, E, F, G, H, I, J); expr_func_derive!(10, A, B, C, D, E, F, G, H, I, J);
expr_func_derive!(11, A, B, C, D, E, F, G, H, I, J, K); expr_func_derive!(11, A, B, C, D, E, F, G, H, I, J, K);
expr_func_derive!(12, A, B, C, D, E, F, G, H, I, J, K, L); expr_func_derive!(12, A, B, C, D, E, F, G, H, I, J, K, L);
expr_func_derive!(13, A, B, C, D, E, F, G, H, I, J, K, L, M); expr_func_derive!(13, A, B, C, D, E, F, G, H, I, J, K, L, M);
expr_func_derive!(14, A, B, C, D, E, F, G, H, I, J, K, L, M, N); expr_func_derive!(14, A, B, C, D, E, F, G, H, I, J, K, L, M, N);
} }

View File

@@ -1,6 +1,6 @@
use std::ops::{Range, RangeInclusive}; use std::ops::{Range, RangeInclusive};
use orchid_base::error::{mk_err, OrcErr, OrcRes}; use orchid_base::error::{OrcErr, OrcRes, mk_err};
use orchid_base::intern; use orchid_base::intern;
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_base::location::Pos; use orchid_base::location::Pos;
@@ -11,58 +11,58 @@ use crate::api;
use crate::tree::{GenTok, GenTokTree}; use crate::tree::{GenTok, GenTokTree};
pub fn err_cascade() -> OrcErr { pub fn err_cascade() -> OrcErr {
mk_err( mk_err(
intern!(str: "An error cascading from a recursive call"), intern!(str: "An error cascading from a recursive call"),
"This error should not surface. If you are seeing it, something is wrong", "This error should not surface. If you are seeing it, something is wrong",
[Pos::None.into()], [Pos::None.into()],
) )
} }
pub fn err_not_applicable() -> OrcErr { pub fn err_not_applicable() -> OrcErr {
mk_err( mk_err(
intern!(str: "Pseudo-error to communicate that the current branch in a dispatch doesn't apply"), intern!(str: "Pseudo-error to communicate that the current branch in a dispatch doesn't apply"),
&*err_cascade().message, &*err_cascade().message,
[Pos::None.into()], [Pos::None.into()],
) )
} }
pub struct LexContext<'a> { pub struct LexContext<'a> {
pub text: &'a Tok<String>, pub text: &'a Tok<String>,
pub sys: api::SysId, pub sys: api::SysId,
pub id: api::ParsId, pub id: api::ParsId,
pub pos: u32, pub pos: u32,
pub reqnot: ReqNot<api::ExtMsgSet>, pub reqnot: ReqNot<api::ExtMsgSet>,
} }
impl<'a> LexContext<'a> { impl<'a> LexContext<'a> {
pub fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, GenTokTree<'a>)> { pub fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, GenTokTree<'a>)> {
let start = self.pos(tail); let start = self.pos(tail);
let lx = let lx =
self.reqnot.request(api::SubLex { pos: start, id: self.id }).ok_or_else(err_cascade)?; self.reqnot.request(api::SubLex { pos: start, id: self.id }).ok_or_else(err_cascade)?;
Ok((&self.text[lx.pos as usize..], GenTok::Slot(TokHandle::new(lx.ticket)).at(start..lx.pos))) Ok((&self.text[lx.pos as usize..], GenTok::Slot(TokHandle::new(lx.ticket)).at(start..lx.pos)))
} }
pub fn pos(&self, tail: &'a str) -> u32 { (self.text.len() - tail.len()) as u32 } pub fn pos(&self, tail: &'a str) -> u32 { (self.text.len() - tail.len()) as u32 }
pub fn tok_ran(&self, len: u32, tail: &'a str) -> Range<u32> { pub fn tok_ran(&self, len: u32, tail: &'a str) -> Range<u32> {
self.pos(tail) - len..self.pos(tail) self.pos(tail) - len..self.pos(tail)
} }
} }
pub trait Lexer: Send + Sync + Sized + Default + 'static { pub trait Lexer: Send + Sync + Sized + Default + 'static {
const CHAR_FILTER: &'static [RangeInclusive<char>]; const CHAR_FILTER: &'static [RangeInclusive<char>];
fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)>; fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)>;
} }
pub trait DynLexer: Send + Sync + 'static { pub trait DynLexer: Send + Sync + 'static {
fn char_filter(&self) -> &'static [RangeInclusive<char>]; fn char_filter(&self) -> &'static [RangeInclusive<char>];
fn lex<'a>(&self, tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)>; fn lex<'a>(&self, tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)>;
} }
impl<T: Lexer> DynLexer for T { impl<T: Lexer> DynLexer for T {
fn char_filter(&self) -> &'static [RangeInclusive<char>] { T::CHAR_FILTER } fn char_filter(&self) -> &'static [RangeInclusive<char>] { T::CHAR_FILTER }
fn lex<'a>(&self, tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> { fn lex<'a>(&self, tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
T::lex(tail, ctx) T::lex(tail, ctx)
} }
} }
pub type LexerObj = &'static dyn DynLexer; pub type LexerObj = &'static dyn DynLexer;

View File

@@ -9,10 +9,10 @@ pub mod expr;
pub mod fs; pub mod fs;
pub mod func_atom; pub mod func_atom;
pub mod lexer; pub mod lexer;
pub mod macros;
pub mod msg; pub mod msg;
pub mod other_system; pub mod other_system;
pub mod parser; pub mod parser;
pub mod system; pub mod system;
pub mod system_ctor; pub mod system_ctor;
pub mod tree; pub mod tree;
pub mod macros;

View File

@@ -1,101 +1,111 @@
use std::num::NonZero;
use std::sync::RwLock;
use ahash::HashMap; use ahash::HashMap;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use never::Never; use never::Never;
use orchid_base::{error::OrcRes, interner::{intern, Tok}, location::Pos, macros::{mtreev_from_api, mtreev_to_api, MTree}, parse::Comment, reqnot::Requester}; use orchid_base::error::OrcRes;
use orchid_base::interner::{Tok, intern};
use orchid_base::location::Pos;
use orchid_base::macros::{MTree, mtreev_from_api, mtreev_to_api};
use orchid_base::parse::Comment;
use orchid_base::reqnot::Requester;
use trait_set::trait_set; use trait_set::trait_set;
use crate::{api, atom::AtomFactory, lexer::err_cascade, system::SysCtx};
use std::{num::NonZero, sync::RwLock}; use crate::api;
use crate::atom::AtomFactory;
use crate::lexer::err_cascade;
use crate::system::SysCtx;
pub trait Macro { pub trait Macro {
fn pattern() -> MTree<'static, Never>; fn pattern() -> MTree<'static, Never>;
fn apply(binds: HashMap<Tok<String>, MTree<'_, Never>>) -> MTree<'_, AtomFactory>; fn apply(binds: HashMap<Tok<String>, MTree<'_, Never>>) -> MTree<'_, AtomFactory>;
} }
pub trait DynMacro { pub trait DynMacro {
fn pattern(&self) -> MTree<'static, Never>; fn pattern(&self) -> MTree<'static, Never>;
fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a, Never>>) -> MTree<'a, AtomFactory>; fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a, Never>>) -> MTree<'a, AtomFactory>;
} }
impl<T: Macro> DynMacro for T { impl<T: Macro> DynMacro for T {
fn pattern(&self) -> MTree<'static, Never> { Self::pattern() } fn pattern(&self) -> MTree<'static, Never> { Self::pattern() }
fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a, Never>>) -> MTree<'a, AtomFactory> { fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a, Never>>) -> MTree<'a, AtomFactory> {
Self::apply(binds) Self::apply(binds)
} }
} }
pub struct RuleCtx<'a> { pub struct RuleCtx<'a> {
pub(crate) args: HashMap<Tok<String>, Vec<MTree<'a, Never>>>, pub(crate) args: HashMap<Tok<String>, Vec<MTree<'a, Never>>>,
pub(crate) run_id: api::ParsId, pub(crate) run_id: api::ParsId,
pub(crate) sys: SysCtx, pub(crate) sys: SysCtx,
} }
impl<'a> RuleCtx<'a> { impl<'a> RuleCtx<'a> {
pub fn recurse(&mut self, tree: &[MTree<'a, Never>]) -> OrcRes<Vec<MTree<'a, Never>>> { pub fn recurse(&mut self, tree: &[MTree<'a, Never>]) -> OrcRes<Vec<MTree<'a, Never>>> {
let req = api::RunMacros{ let req =
run_id: self.run_id, api::RunMacros { run_id: self.run_id, query: mtreev_to_api(tree, &mut |b| match *b {}) };
query: mtreev_to_api(tree, &mut |b| match *b {}) Ok(mtreev_from_api(&self.sys.reqnot.request(req).ok_or_else(err_cascade)?, &mut |_| {
}; panic!("Returned atom from Rule recursion")
Ok(mtreev_from_api( }))
&self.sys.reqnot.request(req).ok_or_else(err_cascade)?, }
&mut |_| panic!("Returned atom from Rule recursion") pub fn getv(&mut self, key: &Tok<String>) -> Vec<MTree<'a, Never>> {
)) self.args.remove(key).expect("Key not found")
} }
pub fn getv(&mut self, key: &Tok<String>) -> Vec<MTree<'a, Never>> { pub fn gets(&mut self, key: &Tok<String>) -> MTree<'a, Never> {
self.args.remove(key).expect("Key not found") let v = self.getv(key);
} assert!(v.len() == 1, "Not a scalar");
pub fn gets(&mut self, key: &Tok<String>) -> MTree<'a, Never> { v.into_iter().next().unwrap()
let v = self.getv(key); }
assert!(v.len() == 1, "Not a scalar"); pub fn unused_arg<'b>(&mut self, keys: impl IntoIterator<Item = &'b Tok<String>>) {
v.into_iter().next().unwrap() keys.into_iter().for_each(|k| {
} self.getv(k);
pub fn unused_arg<'b>(&mut self, keys: impl IntoIterator<Item = &'b Tok<String>>) { });
keys.into_iter().for_each(|k| {self.getv(k);}); }
}
} }
trait_set! { trait_set! {
pub trait RuleCB = for<'a> Fn(RuleCtx<'a>) -> OrcRes<Vec<MTree<'a, AtomFactory>>> + Send + Sync; pub trait RuleCB = for<'a> Fn(RuleCtx<'a>) -> OrcRes<Vec<MTree<'a, AtomFactory>>> + Send + Sync;
} }
lazy_static!{ lazy_static! {
static ref RULES: RwLock<HashMap<api::MacroId, Box<dyn RuleCB>>> = RwLock::default(); static ref RULES: RwLock<HashMap<api::MacroId, Box<dyn RuleCB>>> = RwLock::default();
} }
pub struct Rule { pub struct Rule {
pub(crate) comments: Vec<Comment>, pub(crate) comments: Vec<Comment>,
pub(crate) pattern: Vec<MTree<'static, Never>>, pub(crate) pattern: Vec<MTree<'static, Never>>,
pub(crate) id: api::MacroId, pub(crate) id: api::MacroId,
} }
impl Rule { impl Rule {
pub(crate) fn to_api(&self) -> api::MacroRule { pub(crate) fn to_api(&self) -> api::MacroRule {
api::MacroRule { api::MacroRule {
comments: self.comments.iter().map(|c| c.to_api()).collect(), comments: self.comments.iter().map(|c| c.to_api()).collect(),
location: api::Location::Inherit, location: api::Location::Inherit,
pattern: mtreev_to_api(&self.pattern, &mut |b| match *b {}), pattern: mtreev_to_api(&self.pattern, &mut |b| match *b {}),
id: self.id, id: self.id,
} }
} }
} }
pub fn rule_cmt<'a>( pub fn rule_cmt<'a>(
cmt: impl IntoIterator<Item = &'a str>, cmt: impl IntoIterator<Item = &'a str>,
pattern: Vec<MTree<'static, Never>>, pattern: Vec<MTree<'static, Never>>,
apply: impl RuleCB + 'static apply: impl RuleCB + 'static,
) -> Rule { ) -> Rule {
let mut rules = RULES.write().unwrap(); let mut rules = RULES.write().unwrap();
let id = api::MacroId(NonZero::new(rules.len() as u64 + 1).unwrap()); let id = api::MacroId(NonZero::new(rules.len() as u64 + 1).unwrap());
rules.insert(id, Box::new(apply)); rules.insert(id, Box::new(apply));
let comments = cmt.into_iter().map(|s| Comment { pos: Pos::Inherit, text: intern(s) }).collect(); let comments = cmt.into_iter().map(|s| Comment { pos: Pos::Inherit, text: intern(s) }).collect();
Rule { comments, pattern, id } Rule { comments, pattern, id }
} }
pub fn rule(pattern: Vec<MTree<'static, Never>>, apply: impl RuleCB + 'static) -> Rule { pub fn rule(pattern: Vec<MTree<'static, Never>>, apply: impl RuleCB + 'static) -> Rule {
rule_cmt([], pattern, apply) rule_cmt([], pattern, apply)
} }
pub(crate) fn apply_rule( pub(crate) fn apply_rule(
id: api::MacroId, id: api::MacroId,
ctx: RuleCtx<'static> ctx: RuleCtx<'static>,
) -> OrcRes<Vec<MTree<'static, AtomFactory>>> { ) -> OrcRes<Vec<MTree<'static, AtomFactory>>> {
let rules = RULES.read().unwrap(); let rules = RULES.read().unwrap();
rules[&id](ctx) rules[&id](ctx)
} }

View File

@@ -5,32 +5,32 @@ use crate::api;
use crate::system::{DynSystemCard, SystemCard}; use crate::system::{DynSystemCard, SystemCard};
pub struct SystemHandle<C: SystemCard> { pub struct SystemHandle<C: SystemCard> {
pub(crate) _card: PhantomData<C>, pub(crate) _card: PhantomData<C>,
pub(crate) id: api::SysId, pub(crate) id: api::SysId,
} }
impl<C: SystemCard> SystemHandle<C> { impl<C: SystemCard> SystemHandle<C> {
pub(crate) fn new(id: api::SysId) -> Self { Self { _card: PhantomData, id } } pub(crate) fn new(id: api::SysId) -> Self { Self { _card: PhantomData, id } }
pub fn id(&self) -> api::SysId { self.id } pub fn id(&self) -> api::SysId { self.id }
} }
impl<C: SystemCard> Clone for SystemHandle<C> { impl<C: SystemCard> Clone for SystemHandle<C> {
fn clone(&self) -> Self { Self::new(self.id) } fn clone(&self) -> Self { Self::new(self.id) }
} }
pub trait DynSystemHandle { pub trait DynSystemHandle {
fn id(&self) -> api::SysId; fn id(&self) -> api::SysId;
fn get_card(&self) -> &dyn DynSystemCard; fn get_card(&self) -> &dyn DynSystemCard;
} }
pub fn leak_card<T: Default>() -> &'static T { pub fn leak_card<T: Default>() -> &'static T {
const { const {
if 0 != size_of::<T>() { if 0 != size_of::<T>() {
panic!("Attempted to leak positively sized Card. Card types must always be zero-sized"); panic!("Attempted to leak positively sized Card. Card types must always be zero-sized");
} }
} }
Box::leak(Box::default()) Box::leak(Box::default())
} }
impl<C: SystemCard> DynSystemHandle for SystemHandle<C> { impl<C: SystemCard> DynSystemHandle for SystemHandle<C> {
fn id(&self) -> api::SysId { self.id } fn id(&self) -> api::SysId { self.id }
fn get_card(&self) -> &'static dyn DynSystemCard { leak_card::<C>() } fn get_card(&self) -> &'static dyn DynSystemCard { leak_card::<C>() }
} }

View File

@@ -7,34 +7,34 @@ use crate::tree::GenTokTree;
pub type GenSnippet<'a> = Snippet<'a, 'a, ForeignAtom<'a>, AtomFactory>; pub type GenSnippet<'a> = Snippet<'a, 'a, ForeignAtom<'a>, AtomFactory>;
pub trait Parser: Send + Sync + Sized + Default + 'static { pub trait Parser: Send + Sync + Sized + Default + 'static {
const LINE_HEAD: &'static str; const LINE_HEAD: &'static str;
fn parse( fn parse(
exported: bool, exported: bool,
comments: Vec<Comment>, comments: Vec<Comment>,
line: GenSnippet<'_>, line: GenSnippet<'_>,
) -> OrcRes<Vec<GenTokTree<'_>>>; ) -> OrcRes<Vec<GenTokTree<'_>>>;
} }
pub trait DynParser: Send + Sync + 'static { pub trait DynParser: Send + Sync + 'static {
fn line_head(&self) -> &'static str; fn line_head(&self) -> &'static str;
fn parse<'a>( fn parse<'a>(
&self, &self,
exported: bool, exported: bool,
comments: Vec<Comment>, comments: Vec<Comment>,
line: GenSnippet<'a>, line: GenSnippet<'a>,
) -> OrcRes<Vec<GenTokTree<'a>>>; ) -> OrcRes<Vec<GenTokTree<'a>>>;
} }
impl<T: Parser> DynParser for T { impl<T: Parser> DynParser for T {
fn line_head(&self) -> &'static str { Self::LINE_HEAD } fn line_head(&self) -> &'static str { Self::LINE_HEAD }
fn parse<'a>( fn parse<'a>(
&self, &self,
exported: bool, exported: bool,
comments: Vec<Comment>, comments: Vec<Comment>,
line: GenSnippet<'a>, line: GenSnippet<'a>,
) -> OrcRes<Vec<GenTokTree<'a>>> { ) -> OrcRes<Vec<GenTokTree<'a>>> {
Self::parse(exported, comments, line) Self::parse(exported, comments, line)
} }
} }
pub type ParserObj = &'static dyn DynParser; pub type ParserObj = &'static dyn DynParser;

View File

@@ -10,11 +10,10 @@ use orchid_base::logging::Logger;
use orchid_base::reqnot::{Receipt, ReqNot}; use orchid_base::reqnot::{Receipt, ReqNot};
use crate::api; use crate::api;
use crate::atom::{get_info, AtomCtx, AtomDynfo, AtomicFeatures, ForeignAtom, TypAtom}; use crate::atom::{AtomCtx, AtomDynfo, AtomicFeatures, ForeignAtom, TypAtom, get_info};
use crate::entrypoint::ExtReq; use crate::entrypoint::ExtReq;
use crate::fs::DeclFs; use crate::fs::DeclFs;
use crate::func_atom::Fun; use crate::func_atom::Fun;
// use crate::fun::Fun;
use crate::lexer::LexerObj; use crate::lexer::LexerObj;
use crate::parser::ParserObj; use crate::parser::ParserObj;
use crate::system_ctor::{CtedObj, SystemCtor}; use crate::system_ctor::{CtedObj, SystemCtor};
@@ -22,118 +21,115 @@ use crate::tree::MemKind;
/// System as consumed by foreign code /// System as consumed by foreign code
pub trait SystemCard: Default + Send + Sync + 'static { pub trait SystemCard: Default + Send + Sync + 'static {
type Ctor: SystemCtor; type Ctor: SystemCtor;
type Req: Coding; type Req: Coding;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>; fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>;
} }
pub trait DynSystemCard: Send + Sync + 'static { pub trait DynSystemCard: Send + Sync + 'static {
fn name(&self) -> &'static str; fn name(&self) -> &'static str;
/// Atoms explicitly defined by the system card. Do not rely on this for /// Atoms explicitly defined by the system card. Do not rely on this for
/// querying atoms as it doesn't include the general atom types /// querying atoms as it doesn't include the general atom types
fn atoms(&self) -> BoxedIter<Option<Box<dyn AtomDynfo>>>; fn atoms(&self) -> BoxedIter<Option<Box<dyn AtomDynfo>>>;
} }
/// Atoms supported by this package which may appear in all extensions. /// Atoms supported by this package which may appear in all extensions.
/// The indices of these are bitwise negated, such that the MSB of an atom index /// The indices of these are bitwise negated, such that the MSB of an atom index
/// marks whether it belongs to this package (0) or the importer (1) /// marks whether it belongs to this package (0) or the importer (1)
fn general_atoms() -> impl Iterator<Item = Option<Box<dyn AtomDynfo>>> { fn general_atoms() -> impl Iterator<Item = Option<Box<dyn AtomDynfo>>> {
[Some(Fun::dynfo())].into_iter() [Some(Fun::dynfo())].into_iter()
} }
pub fn atom_info_for( pub fn atom_info_for(
sys: &(impl DynSystemCard + ?Sized), sys: &(impl DynSystemCard + ?Sized),
tid: TypeId, tid: TypeId,
) -> Option<(api::AtomId, Box<dyn AtomDynfo>)> { ) -> Option<(api::AtomId, Box<dyn AtomDynfo>)> {
(sys.atoms().enumerate().map(|(i, o)| (NonZero::new(i as u64 + 1).unwrap(), o))) (sys.atoms().enumerate().map(|(i, o)| (NonZero::new(i as u64 + 1).unwrap(), o)))
.chain(general_atoms().enumerate().map(|(i, o)| (NonZero::new(!(i as u64)).unwrap(), o))) .chain(general_atoms().enumerate().map(|(i, o)| (NonZero::new(!(i as u64)).unwrap(), o)))
.filter_map(|(i, o)| o.map(|a| (api::AtomId(i), a))) .filter_map(|(i, o)| o.map(|a| (api::AtomId(i), a)))
.find(|ent| ent.1.tid() == tid) .find(|ent| ent.1.tid() == tid)
} }
pub fn atom_by_idx( pub fn atom_by_idx(
sys: &(impl DynSystemCard + ?Sized), sys: &(impl DynSystemCard + ?Sized),
tid: api::AtomId, tid: api::AtomId,
) -> Option<Box<dyn AtomDynfo>> { ) -> Option<Box<dyn AtomDynfo>> {
if (u64::from(tid.0) >> (u64::BITS - 1)) & 1 == 1 { if (u64::from(tid.0) >> (u64::BITS - 1)) & 1 == 1 {
general_atoms().nth(!u64::from(tid.0) as usize).unwrap() general_atoms().nth(!u64::from(tid.0) as usize).unwrap()
} else { } else {
sys.atoms().nth(u64::from(tid.0) as usize - 1).unwrap() sys.atoms().nth(u64::from(tid.0) as usize - 1).unwrap()
} }
} }
pub fn resolv_atom( pub fn resolv_atom(sys: &(impl DynSystemCard + ?Sized), atom: &api::Atom) -> Box<dyn AtomDynfo> {
sys: &(impl DynSystemCard + ?Sized), let tid = api::AtomId::decode(&mut &atom.data[..8]);
atom: &api::Atom, atom_by_idx(sys, tid).expect("Value of nonexistent type found")
) -> Box<dyn AtomDynfo> {
let tid = api::AtomId::decode(&mut &atom.data[..8]);
atom_by_idx(sys, tid).expect("Value of nonexistent type found")
} }
impl<T: SystemCard> DynSystemCard for T { impl<T: SystemCard> DynSystemCard for T {
fn name(&self) -> &'static str { T::Ctor::NAME } fn name(&self) -> &'static str { T::Ctor::NAME }
fn atoms(&self) -> BoxedIter<Option<Box<dyn AtomDynfo>>> { Box::new(Self::atoms().into_iter()) } fn atoms(&self) -> BoxedIter<Option<Box<dyn AtomDynfo>>> { Box::new(Self::atoms().into_iter()) }
} }
/// System as defined by author /// System as defined by author
pub trait System: Send + Sync + SystemCard + 'static { pub trait System: Send + Sync + SystemCard + 'static {
fn env() -> Vec<(Tok<String>, MemKind)>; fn env() -> Vec<(Tok<String>, MemKind)>;
fn vfs() -> DeclFs; fn vfs() -> DeclFs;
fn lexers() -> Vec<LexerObj>; fn lexers() -> Vec<LexerObj>;
fn parsers() -> Vec<ParserObj>; fn parsers() -> Vec<ParserObj>;
fn request(hand: ExtReq, req: Self::Req) -> Receipt; fn request(hand: ExtReq, req: Self::Req) -> Receipt;
} }
pub trait DynSystem: Send + Sync + DynSystemCard + 'static { pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
fn dyn_env(&self) -> HashMap<Tok<String>, MemKind>; fn dyn_env(&self) -> HashMap<Tok<String>, MemKind>;
fn dyn_vfs(&self) -> DeclFs; fn dyn_vfs(&self) -> DeclFs;
fn dyn_lexers(&self) -> Vec<LexerObj>; fn dyn_lexers(&self) -> Vec<LexerObj>;
fn dyn_parsers(&self) -> Vec<ParserObj>; fn dyn_parsers(&self) -> Vec<ParserObj>;
fn dyn_request(&self, hand: ExtReq, req: Vec<u8>) -> Receipt; fn dyn_request(&self, hand: ExtReq, req: Vec<u8>) -> Receipt;
fn card(&self) -> &dyn DynSystemCard; fn card(&self) -> &dyn DynSystemCard;
} }
impl<T: System> DynSystem for T { impl<T: System> DynSystem for T {
fn dyn_env(&self) -> HashMap<Tok<String>, MemKind> { Self::env().into_iter().collect() } fn dyn_env(&self) -> HashMap<Tok<String>, MemKind> { Self::env().into_iter().collect() }
fn dyn_vfs(&self) -> DeclFs { Self::vfs() } fn dyn_vfs(&self) -> DeclFs { Self::vfs() }
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() } fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() } fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
fn dyn_request(&self, hand: ExtReq, req: Vec<u8>) -> Receipt { fn dyn_request(&self, hand: ExtReq, req: Vec<u8>) -> Receipt {
Self::request(hand, <Self as SystemCard>::Req::decode(&mut &req[..])) Self::request(hand, <Self as SystemCard>::Req::decode(&mut &req[..]))
} }
fn card(&self) -> &dyn DynSystemCard { self } fn card(&self) -> &dyn DynSystemCard { self }
} }
pub fn downcast_atom<A: AtomicFeatures>(foreign: ForeignAtom) -> Result<TypAtom<A>, ForeignAtom> { pub fn downcast_atom<A: AtomicFeatures>(foreign: ForeignAtom) -> Result<TypAtom<A>, ForeignAtom> {
let mut data = &foreign.atom.data[..]; let mut data = &foreign.atom.data[..];
let ctx = foreign.ctx.clone(); let ctx = foreign.ctx.clone();
let info_ent = (ctx.cted.deps().find(|s| s.id() == foreign.atom.owner)) let info_ent = (ctx.cted.deps().find(|s| s.id() == foreign.atom.owner))
.map(|sys| get_info::<A>(sys.get_card())) .map(|sys| get_info::<A>(sys.get_card()))
.filter(|(pos, _)| api::AtomId::decode(&mut data) == *pos); .filter(|(pos, _)| api::AtomId::decode(&mut data) == *pos);
match info_ent { match info_ent {
None => Err(foreign), None => Err(foreign),
Some((_, info)) => { Some((_, info)) => {
let val = info.decode(AtomCtx(data, foreign.atom.drop, ctx)); let val = info.decode(AtomCtx(data, foreign.atom.drop, ctx));
let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type"); let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type");
Ok(TypAtom { value, data: foreign }) Ok(TypAtom { value, data: foreign })
}, },
} }
} }
#[derive(Clone)] #[derive(Clone)]
pub struct SysCtx { pub struct SysCtx {
pub reqnot: ReqNot<api::ExtMsgSet>, pub reqnot: ReqNot<api::ExtMsgSet>,
pub id: api::SysId, pub id: api::SysId,
pub cted: CtedObj, pub cted: CtedObj,
pub logger: Arc<Logger>, pub logger: Arc<Logger>,
} }
impl SysCtx { impl SysCtx {
pub fn new( pub fn new(
id: api::SysId, id: api::SysId,
cted: &CtedObj, cted: &CtedObj,
logger: &Arc<Logger>, logger: &Arc<Logger>,
reqnot: ReqNot<api::ExtMsgSet>, reqnot: ReqNot<api::ExtMsgSet>,
) -> Self { ) -> Self {
Self { cted: cted.clone(), id, logger: logger.clone(), reqnot } Self { cted: cted.clone(), id, logger: logger.clone(), reqnot }
} }
} }

View File

@@ -1,7 +1,7 @@
use std::any::Any; use std::any::Any;
use std::sync::Arc; use std::sync::Arc;
use orchid_base::boxed_iter::{box_empty, box_once, BoxedIter}; use orchid_base::boxed_iter::{BoxedIter, box_empty, box_once};
use ordered_float::NotNan; use ordered_float::NotNan;
use crate::api; use crate::api;
@@ -9,94 +9,94 @@ use crate::other_system::{DynSystemHandle, SystemHandle};
use crate::system::{DynSystem, System, SystemCard}; use crate::system::{DynSystem, System, SystemCard};
pub struct Cted<Ctor: SystemCtor + ?Sized> { pub struct Cted<Ctor: SystemCtor + ?Sized> {
pub deps: <Ctor::Deps as DepDef>::Sat, pub deps: <Ctor::Deps as DepDef>::Sat,
pub inst: Arc<Ctor::Instance>, pub inst: Arc<Ctor::Instance>,
} }
impl<C: SystemCtor + ?Sized> Clone for Cted<C> { impl<C: SystemCtor + ?Sized> Clone for Cted<C> {
fn clone(&self) -> Self { Self { deps: self.deps.clone(), inst: self.inst.clone() } } fn clone(&self) -> Self { Self { deps: self.deps.clone(), inst: self.inst.clone() } }
} }
pub trait DynCted: Send + Sync + 'static { pub trait DynCted: Send + Sync + 'static {
fn as_any(&self) -> &dyn Any; fn as_any(&self) -> &dyn Any;
fn deps<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>; fn deps<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>;
fn inst(&self) -> Arc<dyn DynSystem>; fn inst(&self) -> Arc<dyn DynSystem>;
} }
impl<C: SystemCtor + ?Sized> DynCted for Cted<C> { impl<C: SystemCtor + ?Sized> DynCted for Cted<C> {
fn as_any(&self) -> &dyn Any { self } fn as_any(&self) -> &dyn Any { self }
fn deps<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { self.deps.iter() } fn deps<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { self.deps.iter() }
fn inst(&self) -> Arc<dyn DynSystem> { self.inst.clone() } fn inst(&self) -> Arc<dyn DynSystem> { self.inst.clone() }
} }
pub type CtedObj = Arc<dyn DynCted>; pub type CtedObj = Arc<dyn DynCted>;
pub trait DepSat: Clone + Send + Sync + 'static { pub trait DepSat: Clone + Send + Sync + 'static {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>; fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>;
} }
pub trait DepDef { pub trait DepDef {
type Sat: DepSat; type Sat: DepSat;
fn report(names: &mut impl FnMut(&'static str)); fn report(names: &mut impl FnMut(&'static str));
fn create(take: &mut impl FnMut() -> api::SysId) -> Self::Sat; fn create(take: &mut impl FnMut() -> api::SysId) -> Self::Sat;
} }
impl<T: SystemCard> DepSat for SystemHandle<T> { impl<T: SystemCard> DepSat for SystemHandle<T> {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { box_once(self) } fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { box_once(self) }
} }
impl<T: SystemCard> DepDef for T { impl<T: SystemCard> DepDef for T {
type Sat = SystemHandle<Self>; type Sat = SystemHandle<Self>;
fn report(names: &mut impl FnMut(&'static str)) { names(T::Ctor::NAME) } fn report(names: &mut impl FnMut(&'static str)) { names(T::Ctor::NAME) }
fn create(take: &mut impl FnMut() -> api::SysId) -> Self::Sat { SystemHandle::new(take()) } fn create(take: &mut impl FnMut() -> api::SysId) -> Self::Sat { SystemHandle::new(take()) }
} }
impl DepSat for () { impl DepSat for () {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { box_empty() } fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { box_empty() }
} }
impl DepDef for () { impl DepDef for () {
type Sat = (); type Sat = ();
fn create(_: &mut impl FnMut() -> api::SysId) -> Self::Sat {} fn create(_: &mut impl FnMut() -> api::SysId) -> Self::Sat {}
fn report(_: &mut impl FnMut(&'static str)) {} fn report(_: &mut impl FnMut(&'static str)) {}
} }
pub trait SystemCtor: Send + Sync + 'static { pub trait SystemCtor: Send + Sync + 'static {
type Deps: DepDef; type Deps: DepDef;
type Instance: System; type Instance: System;
const NAME: &'static str; const NAME: &'static str;
const VERSION: f64; const VERSION: f64;
fn inst() -> Option<Self::Instance>; fn inst() -> Option<Self::Instance>;
} }
pub trait DynSystemCtor: Send + Sync + 'static { pub trait DynSystemCtor: Send + Sync + 'static {
fn decl(&self, id: api::SysDeclId) -> api::SystemDecl; fn decl(&self, id: api::SysDeclId) -> api::SystemDecl;
fn new_system(&self, new: &api::NewSystem) -> CtedObj; fn new_system(&self, new: &api::NewSystem) -> CtedObj;
} }
impl<T: SystemCtor> DynSystemCtor for T { impl<T: SystemCtor> DynSystemCtor for T {
fn decl(&self, id: api::SysDeclId) -> api::SystemDecl { fn decl(&self, id: api::SysDeclId) -> api::SystemDecl {
// Version is equivalent to priority for all practical purposes // Version is equivalent to priority for all practical purposes
let priority = NotNan::new(T::VERSION).unwrap(); let priority = NotNan::new(T::VERSION).unwrap();
// aggregate depends names // aggregate depends names
let mut depends = Vec::new(); let mut depends = Vec::new();
T::Deps::report(&mut |n| depends.push(n.to_string())); T::Deps::report(&mut |n| depends.push(n.to_string()));
api::SystemDecl { name: T::NAME.to_string(), depends, id, priority } api::SystemDecl { name: T::NAME.to_string(), depends, id, priority }
} }
fn new_system(&self, api::NewSystem { system: _, id: _, depends }: &api::NewSystem) -> CtedObj { fn new_system(&self, api::NewSystem { system: _, id: _, depends }: &api::NewSystem) -> CtedObj {
let mut ids = depends.iter().copied(); let mut ids = depends.iter().copied();
let inst = Arc::new(T::inst().expect("Constructor did not create system")); let inst = Arc::new(T::inst().expect("Constructor did not create system"));
let deps = T::Deps::create(&mut || ids.next().unwrap()); let deps = T::Deps::create(&mut || ids.next().unwrap());
Arc::new(Cted::<T> { deps, inst }) Arc::new(Cted::<T> { deps, inst })
} }
} }
mod dep_set_tuple_impls { mod dep_set_tuple_impls {
use orchid_base::box_chain; use orchid_base::box_chain;
use orchid_base::boxed_iter::BoxedIter; use orchid_base::boxed_iter::BoxedIter;
use paste::paste; use paste::paste;
use super::{DepDef, DepSat}; use super::{DepDef, DepSat};
use crate::api; use crate::api;
use crate::system_ctor::DynSystemHandle; use crate::system_ctor::DynSystemHandle;
macro_rules! dep_set_tuple_impl { macro_rules! dep_set_tuple_impl {
($($name:ident),*) => { ($($name:ident),*) => {
impl<$( $name :DepSat ),*> DepSat for ( $( $name , )* ) { impl<$( $name :DepSat ),*> DepSat for ( $( $name , )* ) {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> {
@@ -137,20 +137,20 @@ mod dep_set_tuple_impls {
}; };
} }
dep_set_tuple_impl!(A); dep_set_tuple_impl!(A);
dep_set_tuple_impl!(A, B); // 2 dep_set_tuple_impl!(A, B); // 2
dep_set_tuple_impl!(A, B, C); dep_set_tuple_impl!(A, B, C);
dep_set_tuple_impl!(A, B, C, D); // 4 dep_set_tuple_impl!(A, B, C, D); // 4
dep_set_tuple_impl!(A, B, C, D, E); dep_set_tuple_impl!(A, B, C, D, E);
dep_set_tuple_impl!(A, B, C, D, E, F); dep_set_tuple_impl!(A, B, C, D, E, F);
dep_set_tuple_impl!(A, B, C, D, E, F, G); dep_set_tuple_impl!(A, B, C, D, E, F, G);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H); // 8 dep_set_tuple_impl!(A, B, C, D, E, F, G, H); // 8
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I); dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J); dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K); dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L); // 12 dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L); // 12
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M); dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N); dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O); dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P); // 16 dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P); // 16
} }

View File

@@ -15,7 +15,7 @@ use trait_set::trait_set;
use crate::api; use crate::api;
use crate::atom::{AtomFactory, ForeignAtom}; use crate::atom::{AtomFactory, ForeignAtom};
use crate::conv::{ToExpr, TryFromExpr}; use crate::conv::ToExpr;
use crate::entrypoint::MemberRecord; use crate::entrypoint::MemberRecord;
use crate::expr::Expr; use crate::expr::Expr;
use crate::func_atom::{ExprFunc, Fun}; use crate::func_atom::{ExprFunc, Fun};
@@ -26,169 +26,169 @@ pub type GenTokTree<'a> = TokTree<'a, ForeignAtom<'a>, AtomFactory>;
pub type GenTok<'a> = Token<'a, ForeignAtom<'a>, AtomFactory>; pub type GenTok<'a> = Token<'a, ForeignAtom<'a>, AtomFactory>;
pub fn do_extra(f: &AtomFactory, r: Range<u32>, ctx: SysCtx) -> api::TokenTree { pub fn do_extra(f: &AtomFactory, r: Range<u32>, ctx: SysCtx) -> api::TokenTree {
api::TokenTree { range: r, token: api::Token::Atom(f.clone().build(ctx)) } api::TokenTree { range: r, token: api::Token::Atom(f.clone().build(ctx)) }
} }
fn with_export(mem: GenMember, public: bool) -> Vec<GenItem> { fn with_export(mem: GenMember, public: bool) -> Vec<GenItem> {
(public.then(|| GenItemKind::Export(mem.name.clone()).at(Pos::Inherit)).into_iter()) (public.then(|| GenItemKind::Export(mem.name.clone()).at(Pos::Inherit)).into_iter())
.chain([GenItemKind::Member(mem).at(Pos::Inherit)]) .chain([GenItemKind::Member(mem).at(Pos::Inherit)])
.collect() .collect()
} }
pub struct GenItem { pub struct GenItem {
pub kind: GenItemKind, pub kind: GenItemKind,
pub comments: Vec<Comment>, pub comments: Vec<Comment>,
pub pos: Pos, pub pos: Pos,
} }
impl GenItem { impl GenItem {
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Item { pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Item {
let kind = match self.kind { let kind = match self.kind {
GenItemKind::Export(n) => api::ItemKind::Export(n.to_api()), GenItemKind::Export(n) => api::ItemKind::Export(n.to_api()),
GenItemKind::Member(mem) => api::ItemKind::Member(mem.into_api(ctx)), GenItemKind::Member(mem) => api::ItemKind::Member(mem.into_api(ctx)),
GenItemKind::Import(cn) => api::ItemKind::Import(cn.tok().to_api()), GenItemKind::Import(cn) => api::ItemKind::Import(cn.tok().to_api()),
GenItemKind::Macro(prio, rules) => api::ItemKind::Macro(api::MacroBlock { GenItemKind::Macro(prio, rules) => api::ItemKind::Macro(api::MacroBlock {
priority: prio, priority: prio,
rules: rules.into_iter().map(|r| r.to_api()).collect_vec(), rules: rules.into_iter().map(|r| r.to_api()).collect_vec(),
}), }),
}; };
let comments = self.comments.into_iter().map(|c| c.to_api()).collect_vec(); let comments = self.comments.into_iter().map(|c| c.to_api()).collect_vec();
api::Item { location: self.pos.to_api(), comments, kind } api::Item { location: self.pos.to_api(), comments, kind }
} }
} }
pub fn cnst(public: bool, name: &str, value: impl ToExpr) -> Vec<GenItem> { pub fn cnst(public: bool, name: &str, value: impl ToExpr) -> Vec<GenItem> {
with_export(GenMember { name: intern(name), kind: MemKind::Const(value.to_expr()) }, public) with_export(GenMember { name: intern(name), kind: MemKind::Const(value.to_expr()) }, public)
} }
pub fn module( pub fn module(
public: bool, public: bool,
name: &str, name: &str,
imports: impl IntoIterator<Item = Sym>, imports: impl IntoIterator<Item = Sym>,
items: impl IntoIterator<Item = Vec<GenItem>>, items: impl IntoIterator<Item = Vec<GenItem>>,
) -> Vec<GenItem> { ) -> Vec<GenItem> {
let (name, kind) = root_mod(name, imports, items); let (name, kind) = root_mod(name, imports, items);
with_export(GenMember { name, kind }, public) with_export(GenMember { name, kind }, public)
} }
pub fn root_mod( pub fn root_mod(
name: &str, name: &str,
imports: impl IntoIterator<Item = Sym>, imports: impl IntoIterator<Item = Sym>,
items: impl IntoIterator<Item = Vec<GenItem>>, items: impl IntoIterator<Item = Vec<GenItem>>,
) -> (Tok<String>, MemKind) { ) -> (Tok<String>, MemKind) {
let kind = MemKind::Mod { let kind = MemKind::Mod {
imports: imports.into_iter().collect(), imports: imports.into_iter().collect(),
items: items.into_iter().flatten().collect(), items: items.into_iter().flatten().collect(),
}; };
(intern(name), kind) (intern(name), kind)
} }
pub fn fun<I, O>(exported: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenItem> { pub fn fun<I, O>(exported: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenItem> {
let fac = LazyMemberFactory::new(move |sym| MemKind::Const(Fun::new(sym, xf).to_expr())); let fac = LazyMemberFactory::new(move |sym| MemKind::Const(Fun::new(sym, xf).to_expr()));
with_export(GenMember { name: intern(name), kind: MemKind::Lazy(fac) }, exported) with_export(GenMember { name: intern(name), kind: MemKind::Lazy(fac) }, exported)
} }
pub fn macro_block(prio: Option<f64>, rules: impl IntoIterator<Item = Rule>) -> Vec<GenItem> { pub fn macro_block(prio: Option<f64>, rules: impl IntoIterator<Item = Rule>) -> Vec<GenItem> {
let prio = prio.map(|p| NotNan::new(p).unwrap()); let prio = prio.map(|p| NotNan::new(p).unwrap());
vec![GenItemKind::Macro(prio, rules.into_iter().collect_vec()).gen()] vec![GenItemKind::Macro(prio, rules.into_iter().collect_vec()).gen()]
} }
pub fn comments<'a>( pub fn comments<'a>(
cmts: impl IntoIterator<Item = &'a str> + Clone, cmts: impl IntoIterator<Item = &'a str> + Clone,
mut val: Vec<GenItem>, mut val: Vec<GenItem>,
) -> Vec<GenItem> { ) -> Vec<GenItem> {
for v in val.iter_mut() { for v in val.iter_mut() {
v.comments v.comments
.extend(cmts.clone().into_iter().map(|c| Comment { text: intern(c), pos: Pos::Inherit })); .extend(cmts.clone().into_iter().map(|c| Comment { text: intern(c), pos: Pos::Inherit }));
} }
val val
} }
trait_set! { trait_set! {
trait LazyMemberCallback = FnOnce(Sym) -> MemKind + Send + Sync + DynClone trait LazyMemberCallback = FnOnce(Sym) -> MemKind + Send + Sync + DynClone
} }
pub struct LazyMemberFactory(Box<dyn LazyMemberCallback>); pub struct LazyMemberFactory(Box<dyn LazyMemberCallback>);
impl LazyMemberFactory { impl LazyMemberFactory {
pub fn new(cb: impl FnOnce(Sym) -> MemKind + Send + Sync + Clone + 'static) -> Self { pub fn new(cb: impl FnOnce(Sym) -> MemKind + Send + Sync + Clone + 'static) -> Self {
Self(Box::new(cb)) Self(Box::new(cb))
} }
pub fn build(self, path: Sym) -> MemKind { (self.0)(path) } pub fn build(self, path: Sym) -> MemKind { (self.0)(path) }
} }
impl Clone for LazyMemberFactory { impl Clone for LazyMemberFactory {
fn clone(&self) -> Self { Self(clone_box(&*self.0)) } fn clone(&self) -> Self { Self(clone_box(&*self.0)) }
} }
pub enum GenItemKind { pub enum GenItemKind {
Member(GenMember), Member(GenMember),
Export(Tok<String>), Export(Tok<String>),
Import(Sym), Import(Sym),
Macro(Option<NotNan<f64>>, Vec<Rule>), Macro(Option<NotNan<f64>>, Vec<Rule>),
} }
impl GenItemKind { impl GenItemKind {
pub fn at(self, pos: Pos) -> GenItem { GenItem { kind: self, comments: vec![], pos } } pub fn at(self, pos: Pos) -> GenItem { GenItem { kind: self, comments: vec![], pos } }
pub fn gen(self) -> GenItem { GenItem { kind: self, comments: vec![], pos: Pos::Inherit } } pub fn gen(self) -> GenItem { GenItem { kind: self, comments: vec![], pos: Pos::Inherit } }
pub fn gen_equiv(self, comments: Vec<Comment>) -> GenItem { pub fn gen_equiv(self, comments: Vec<Comment>) -> GenItem {
GenItem { kind: self, comments, pos: Pos::Inherit } GenItem { kind: self, comments, pos: Pos::Inherit }
} }
} }
pub struct GenMember { pub struct GenMember {
name: Tok<String>, name: Tok<String>,
kind: MemKind, kind: MemKind,
} }
impl GenMember { impl GenMember {
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Member { pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Member {
api::Member { api::Member {
name: self.name.to_api(), name: self.name.to_api(),
kind: self.kind.into_api(&mut ctx.push_path(self.name)), kind: self.kind.into_api(&mut ctx.push_path(self.name)),
} }
} }
} }
pub enum MemKind { pub enum MemKind {
Const(Expr), Const(Expr),
Mod { imports: Vec<Sym>, items: Vec<GenItem> }, Mod { imports: Vec<Sym>, items: Vec<GenItem> },
Lazy(LazyMemberFactory), Lazy(LazyMemberFactory),
} }
impl MemKind { impl MemKind {
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind { pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
match self { match self {
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)), Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)),
Self::Const(c) => Self::Const(c) =>
api::MemberKind::Const(c.api_return(ctx.sys(), &mut |_| panic!("Slot found in const tree"))), api::MemberKind::Const(c.api_return(ctx.sys(), &mut |_| panic!("Slot in const tree"))),
Self::Mod { imports, items } => api::MemberKind::Module(api::Module { Self::Mod { imports, items } => api::MemberKind::Module(api::Module {
items: (imports.into_iter()) items: (imports.into_iter())
.map(|t| GenItemKind::Import(t).gen()) .map(|t| GenItemKind::Import(t).gen())
.chain(items) .chain(items)
.map(|i| i.into_api(ctx)) .map(|i| i.into_api(ctx))
.collect_vec(), .collect_vec(),
}), }),
} }
} }
} }
pub trait TreeIntoApiCtx { pub trait TreeIntoApiCtx {
fn sys(&self) -> SysCtx; fn sys(&self) -> SysCtx;
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId; fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId;
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx; fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx;
} }
pub struct TIACtxImpl<'a, 'b> { pub struct TIACtxImpl<'a, 'b> {
pub sys: SysCtx, pub sys: SysCtx,
pub basepath: &'a [Tok<String>], pub basepath: &'a [Tok<String>],
pub path: Substack<'a, Tok<String>>, pub path: Substack<'a, Tok<String>>,
pub lazy: &'b mut HashMap<api::TreeId, MemberRecord>, pub lazy: &'b mut HashMap<api::TreeId, MemberRecord>,
} }
impl<'a, 'b> TreeIntoApiCtx for TIACtxImpl<'a, 'b> { impl<'a, 'b> TreeIntoApiCtx for TIACtxImpl<'a, 'b> {
fn sys(&self) -> SysCtx { self.sys.clone() } fn sys(&self) -> SysCtx { self.sys.clone() }
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx { fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx {
TIACtxImpl { TIACtxImpl {
sys: self.sys.clone(), sys: self.sys.clone(),
lazy: self.lazy, lazy: self.lazy,
basepath: self.basepath, basepath: self.basepath,
path: self.path.push(seg), path: self.path.push(seg),
} }
} }
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId { fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId {
let id = api::TreeId(NonZero::new((self.lazy.len() + 2) as u64).unwrap()); let id = api::TreeId(NonZero::new((self.lazy.len() + 2) as u64).unwrap());
let path = Sym::new(self.basepath.iter().cloned().chain(self.path.unreverse())).unwrap(); let path = Sym::new(self.basepath.iter().cloned().chain(self.path.unreverse())).unwrap();
self.lazy.insert(id, MemberRecord::Gen(path, fac)); self.lazy.insert(id, MemberRecord::Gen(path, fac));
id id
} }
} }

View File

@@ -9,13 +9,13 @@ edition = "2021"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
hashbrown = "0.15.2" hashbrown = "0.15.2"
itertools = "0.14.0" itertools = "0.14.0"
lazy_static = "1.4.0" lazy_static = "1.5.0"
never = "0.1.0" never = "0.1.0"
num-traits = "0.2.19" num-traits = "0.2.19"
orchid-api = { version = "0.1.0", path = "../orchid-api" } orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "4.2.0" ordered-float = "4.6.0"
paste = "1.0.15" paste = "1.0.15"
substack = "1.1.1" substack = "1.1.1"
trait-set = "0.3.0" trait-set = "0.3.0"

View File

@@ -4,41 +4,41 @@ use std::{fmt, io, mem, process};
use orchid_base::msg::{recv_msg, send_msg}; use orchid_base::msg::{recv_msg, send_msg};
pub struct SharedChild { pub struct SharedChild {
child: process::Child, child: process::Child,
stdin: Mutex<process::ChildStdin>, stdin: Mutex<process::ChildStdin>,
stdout: Mutex<process::ChildStdout>, stdout: Mutex<process::ChildStdout>,
debug: Option<(String, Mutex<Box<dyn fmt::Write>>)>, debug: Option<(String, Mutex<Box<dyn fmt::Write>>)>,
} }
impl SharedChild { impl SharedChild {
pub fn new( pub fn new(
command: &mut process::Command, command: &mut process::Command,
debug: Option<(&str, impl fmt::Write + 'static)>, debug: Option<(&str, impl fmt::Write + 'static)>,
) -> io::Result<Self> { ) -> io::Result<Self> {
let mut child = let mut child =
command.stdin(process::Stdio::piped()).stdout(process::Stdio::piped()).spawn()?; command.stdin(process::Stdio::piped()).stdout(process::Stdio::piped()).spawn()?;
let stdin = Mutex::new(child.stdin.take().expect("Piped stdin above")); let stdin = Mutex::new(child.stdin.take().expect("Piped stdin above"));
let stdout = Mutex::new(child.stdout.take().expect("Piped stdout above")); let stdout = Mutex::new(child.stdout.take().expect("Piped stdout above"));
let debug = debug.map(|(n, w)| (n.to_string(), Mutex::new(Box::new(w) as Box<dyn fmt::Write>))); let debug = debug.map(|(n, w)| (n.to_string(), Mutex::new(Box::new(w) as Box<dyn fmt::Write>)));
Ok(Self { child, stdin, stdout, debug }) Ok(Self { child, stdin, stdout, debug })
} }
pub fn send_msg(&self, msg: &[u8]) -> io::Result<()> { pub fn send_msg(&self, msg: &[u8]) -> io::Result<()> {
if let Some((n, dbg)) = &self.debug { if let Some((n, dbg)) = &self.debug {
let mut dbg = dbg.lock().unwrap(); let mut dbg = dbg.lock().unwrap();
writeln!(dbg, "To {n}: {msg:?}").unwrap(); writeln!(dbg, "To {n}: {msg:?}").unwrap();
} }
send_msg(&mut *self.stdin.lock().unwrap(), msg) send_msg(&mut *self.stdin.lock().unwrap(), msg)
} }
pub fn recv_msg(&self) -> io::Result<Vec<u8>> { pub fn recv_msg(&self) -> io::Result<Vec<u8>> {
let msg = recv_msg(&mut *self.stdout.lock().unwrap()); let msg = recv_msg(&mut *self.stdout.lock().unwrap());
if let Some((n, dbg)) = &self.debug { if let Some((n, dbg)) = &self.debug {
let mut dbg = dbg.lock().unwrap(); let mut dbg = dbg.lock().unwrap();
writeln!(dbg, "From {n}: {msg:?}").unwrap(); writeln!(dbg, "From {n}: {msg:?}").unwrap();
} }
msg msg
} }
} }
impl Drop for SharedChild { impl Drop for SharedChild {
fn drop(&mut self) { mem::drop(self.child.kill()) } fn drop(&mut self) { mem::drop(self.child.kill()) }
} }

View File

@@ -18,123 +18,124 @@ pub type ExprParseCtx = ();
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Expr { pub struct Expr {
is_canonical: Arc<AtomicBool>, is_canonical: Arc<AtomicBool>,
pos: Pos, pos: Pos,
kind: Arc<RwLock<ExprKind>>, kind: Arc<RwLock<ExprKind>>,
} }
impl Expr { impl Expr {
pub fn pos(&self) -> Pos { self.pos.clone() } pub fn pos(&self) -> Pos { self.pos.clone() }
pub fn as_atom(&self) -> Option<AtomHand> { todo!() } pub fn as_atom(&self) -> Option<AtomHand> { todo!() }
pub fn strong_count(&self) -> usize { todo!() } pub fn strong_count(&self) -> usize { todo!() }
pub fn id(&self) -> api::ExprTicket { pub fn id(&self) -> api::ExprTicket {
api::ExprTicket( api::ExprTicket(
NonZeroU64::new(self.kind.as_ref() as *const RwLock<_> as usize as u64) NonZeroU64::new(self.kind.as_ref() as *const RwLock<_> as usize as u64)
.expect("this is a ref, it cannot be null"), .expect("this is a ref, it cannot be null"),
) )
} }
pub fn canonicalize(&self) -> api::ExprTicket { pub fn canonicalize(&self) -> api::ExprTicket {
if !self.is_canonical.swap(true, Ordering::Relaxed) { if !self.is_canonical.swap(true, Ordering::Relaxed) {
KNOWN_EXPRS.write().unwrap().entry(self.id()).or_insert_with(|| self.clone()); KNOWN_EXPRS.write().unwrap().entry(self.id()).or_insert_with(|| self.clone());
} }
self.id() self.id()
} }
pub fn resolve(tk: api::ExprTicket) -> Option<Self> { pub fn resolve(tk: api::ExprTicket) -> Option<Self> {
KNOWN_EXPRS.read().unwrap().get(&tk).cloned() KNOWN_EXPRS.read().unwrap().get(&tk).cloned()
} }
pub fn from_api(api: &api::Expression, ctx: &mut ExprParseCtx) -> Self { pub fn from_api(api: &api::Expression, ctx: &mut ExprParseCtx) -> Self {
if let api::ExpressionKind::Slot(tk) = &api.kind { if let api::ExpressionKind::Slot(tk) = &api.kind {
return Self::resolve(*tk).expect("Invalid slot"); return Self::resolve(*tk).expect("Invalid slot");
} }
Self { Self {
kind: Arc::new(RwLock::new(ExprKind::from_api(&api.kind, ctx))), kind: Arc::new(RwLock::new(ExprKind::from_api(&api.kind, ctx))),
is_canonical: Arc::default(), is_canonical: Arc::default(),
pos: Pos::from_api(&api.location), pos: Pos::from_api(&api.location),
} }
} }
pub fn to_api(&self) -> api::InspectedKind { pub fn to_api(&self) -> api::InspectedKind {
use api::InspectedKind as K; use api::InspectedKind as K;
match &*self.kind.read().unwrap() { match &*self.kind.read().unwrap() {
ExprKind::Atom(a) => K::Atom(a.to_api()), ExprKind::Atom(a) => K::Atom(a.to_api()),
ExprKind::Bottom(b) => K::Bottom(b.to_api()), ExprKind::Bottom(b) => K::Bottom(b.to_api()),
_ => K::Opaque, _ => K::Opaque,
} }
} }
} }
impl Drop for Expr { impl Drop for Expr {
fn drop(&mut self) { fn drop(&mut self) {
// If the only two references left are this and known, remove from known // If the only two references left are this and known, remove from known
if Arc::strong_count(&self.kind) == 2 && self.is_canonical.load(Ordering::Relaxed) { if Arc::strong_count(&self.kind) == 2 && self.is_canonical.load(Ordering::Relaxed) {
// if known is poisoned, a leak is preferable to a panicking destructor // if known is poisoned, a leak is preferable to a panicking destructor
if let Ok(mut w) = KNOWN_EXPRS.write() { if let Ok(mut w) = KNOWN_EXPRS.write() {
w.remove(&self.id()); w.remove(&self.id());
} }
} }
} }
} }
lazy_static! { lazy_static! {
static ref KNOWN_EXPRS: RwLock<HashMap<api::ExprTicket, Expr>> = RwLock::default(); static ref KNOWN_EXPRS: RwLock<HashMap<api::ExprTicket, Expr>> = RwLock::default();
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum ExprKind { pub enum ExprKind {
Seq(Expr, Expr), Seq(Expr, Expr),
Call(Expr, Expr), Call(Expr, Expr),
Atom(AtomHand), Atom(AtomHand),
Arg, Arg,
Lambda(Option<PathSet>, Expr), Lambda(Option<PathSet>, Expr),
Bottom(OrcErrv), Bottom(OrcErrv),
Const(Sym), Const(Sym),
} }
impl ExprKind { impl ExprKind {
pub fn from_api(api: &api::ExpressionKind, ctx: &mut ExprParseCtx) -> Self { pub fn from_api(api: &api::ExpressionKind, ctx: &mut ExprParseCtx) -> Self {
match_mapping!(api, api::ExpressionKind => ExprKind { match_mapping!(api, api::ExpressionKind => ExprKind {
Lambda(id => PathSet::from_api(*id, api), b => Expr::from_api(b, ctx)), Lambda(id => PathSet::from_api(*id, api), b => Expr::from_api(b, ctx)),
Bottom(b => OrcErrv::from_api(b)), Bottom(b => OrcErrv::from_api(b)),
Call(f => Expr::from_api(f, ctx), x => Expr::from_api(x, ctx)), Call(f => Expr::from_api(f, ctx), x => Expr::from_api(x, ctx)),
Const(c => Sym::from_api(*c)), Const(c => Sym::from_api(*c)),
Seq(a => Expr::from_api(a, ctx), b => Expr::from_api(b, ctx)), Seq(a => Expr::from_api(a, ctx), b => Expr::from_api(b, ctx)),
} { } {
api::ExpressionKind::Arg(_) => ExprKind::Arg, api::ExpressionKind::Arg(_) => ExprKind::Arg,
api::ExpressionKind::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(a.clone())), api::ExpressionKind::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(a.clone())),
api::ExpressionKind::Slot(_) => panic!("Handled in Expr"), api::ExpressionKind::Slot(_) => panic!("Handled in Expr"),
}) })
} }
} }
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum Step { pub enum Step {
Left, Left,
Right, Right,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct PathSet { pub struct PathSet {
/// The single steps through [super::nort::Clause::Apply] /// The single steps through [super::nort::Clause::Apply]
pub steps: VecDeque<Step>, pub steps: VecDeque<Step>,
/// if Some, it splits at a [super::nort::Clause::Apply]. If None, it ends in /// if Some, it splits at a [super::nort::Clause::Apply]. If None, it ends in
/// a [super::nort::Clause::LambdaArg] /// a [super::nort::Clause::LambdaArg]
pub next: Option<(Box<PathSet>, Box<PathSet>)>, pub next: Option<(Box<PathSet>, Box<PathSet>)>,
} }
impl PathSet { impl PathSet {
pub fn after(mut self, step: Step) -> Self { pub fn after(mut self, step: Step) -> Self {
self.steps.push_front(step); self.steps.push_front(step);
self self
} }
pub fn from_api(id: u64, api: &api::ExpressionKind) -> Option<Self> { pub fn from_api(id: u64, api: &api::ExpressionKind) -> Option<Self> {
use api::ExpressionKind as K; use api::ExpressionKind as K;
match &api { match &api {
K::Arg(id2) => (id == *id2).then(|| Self { steps: VecDeque::new(), next: None }), K::Arg(id2) => (id == *id2).then(|| Self { steps: VecDeque::new(), next: None }),
K::Bottom(_) | K::Const(_) | K::NewAtom(_) | K::Slot(_) => None, K::Bottom(_) | K::Const(_) | K::NewAtom(_) | K::Slot(_) => None,
K::Lambda(_, b) => Self::from_api(id, &b.kind), K::Lambda(_, b) => Self::from_api(id, &b.kind),
K::Call(l, r) | K::Seq(l, r) => K::Call(l, r) | K::Seq(l, r) => {
match (Self::from_api(id, &l.kind), Self::from_api(id, &r.kind)) { match (Self::from_api(id, &l.kind), Self::from_api(id, &r.kind)) {
(Some(a), Some(b)) => (Some(a), Some(b)) =>
Some(Self { steps: VecDeque::new(), next: Some((Box::new(a), Box::new(b))) }), Some(Self { steps: VecDeque::new(), next: Some((Box::new(a), Box::new(b))) }),
(Some(l), None) => Some(l.after(Step::Left)), (Some(l), None) => Some(l.after(Step::Left)),
(None, Some(r)) => Some(r.after(Step::Right)), (None, Some(r)) => Some(r.after(Step::Right)),
(None, None) => None, (None, None) => None,
}, }
} },
} }
}
} }

View File

@@ -32,78 +32,78 @@ use crate::tree::{Member, ParsTokTree};
#[derive(Debug, destructure)] #[derive(Debug, destructure)]
pub struct AtomData { pub struct AtomData {
owner: System, owner: System,
drop: Option<api::AtomId>, drop: Option<api::AtomId>,
data: Vec<u8>, data: Vec<u8>,
} }
impl AtomData { impl AtomData {
fn api(self) -> api::Atom { fn api(self) -> api::Atom {
let (owner, drop, data) = self.destructure(); let (owner, drop, data) = self.destructure();
api::Atom { data, drop, owner: owner.id() } api::Atom { data, drop, owner: owner.id() }
} }
fn api_ref(&self) -> api::Atom { fn api_ref(&self) -> api::Atom {
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() } api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
} }
} }
impl Drop for AtomData { impl Drop for AtomData {
fn drop(&mut self) { fn drop(&mut self) {
if let Some(id) = self.drop { if let Some(id) = self.drop {
self.owner.reqnot().notify(api::AtomDrop(self.owner.id(), id)) self.owner.reqnot().notify(api::AtomDrop(self.owner.id(), id))
} }
} }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct AtomHand(Arc<AtomData>); pub struct AtomHand(Arc<AtomData>);
impl AtomHand { impl AtomHand {
pub fn from_api(atom: api::Atom) -> Self { pub fn from_api(atom: api::Atom) -> Self {
fn create_new(api::Atom { data, drop, owner }: api::Atom) -> AtomHand { fn create_new(api::Atom { data, drop, owner }: api::Atom) -> AtomHand {
let owner = System::resolve(owner).expect("Atom owned by non-existing system"); let owner = System::resolve(owner).expect("Atom owned by non-existing system");
AtomHand(Arc::new(AtomData { data, drop, owner })) AtomHand(Arc::new(AtomData { data, drop, owner }))
} }
if let Some(id) = atom.drop { if let Some(id) = atom.drop {
lazy_static! { lazy_static! {
static ref OWNED_ATOMS: Mutex<HashMap<(api::SysId, api::AtomId), Weak<AtomData>>> = static ref OWNED_ATOMS: Mutex<HashMap<(api::SysId, api::AtomId), Weak<AtomData>>> =
Mutex::default(); Mutex::default();
} }
let owner = atom.owner; let owner = atom.owner;
let mut owned_g = OWNED_ATOMS.lock().unwrap(); let mut owned_g = OWNED_ATOMS.lock().unwrap();
if let Some(data) = owned_g.get(&(owner, id)) { if let Some(data) = owned_g.get(&(owner, id)) {
if let Some(atom) = data.upgrade() { if let Some(atom) = data.upgrade() {
return Self(atom); return Self(atom);
} }
} }
let new = create_new(atom); let new = create_new(atom);
owned_g.insert((owner, id), Arc::downgrade(&new.0)); owned_g.insert((owner, id), Arc::downgrade(&new.0));
new new
} else { } else {
create_new(atom) create_new(atom)
} }
} }
pub fn call(self, arg: Expr) -> api::Expression { pub fn call(self, arg: Expr) -> api::Expression {
let owner_sys = self.0.owner.clone(); let owner_sys = self.0.owner.clone();
let reqnot = owner_sys.reqnot(); let reqnot = owner_sys.reqnot();
let ticket = owner_sys.give_expr(arg.canonicalize(), || arg); let ticket = owner_sys.give_expr(arg.canonicalize(), || arg);
match Arc::try_unwrap(self.0) { match Arc::try_unwrap(self.0) {
Ok(data) => reqnot.request(api::FinalCall(data.api(), ticket)), Ok(data) => reqnot.request(api::FinalCall(data.api(), ticket)),
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), ticket)), Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), ticket)),
} }
} }
pub fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> { pub fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req)) self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req))
} }
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() } pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
pub fn print(&self) -> String { self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())) } pub fn print(&self) -> String { self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())) }
} }
impl AtomRepr for AtomHand { impl AtomRepr for AtomHand {
type Ctx = (); type Ctx = ();
fn from_api(atom: &orchid_api::Atom, _: Pos, (): &mut Self::Ctx) -> Self { fn from_api(atom: &orchid_api::Atom, _: Pos, (): &mut Self::Ctx) -> Self {
Self::from_api(atom.clone()) Self::from_api(atom.clone())
} }
fn to_api(&self) -> orchid_api::Atom { self.api_ref() } fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
} }
impl fmt::Display for AtomHand { impl fmt::Display for AtomHand {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.print()) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.print()) }
} }
pub type OnMessage = Box<dyn FnMut(&[u8]) + Send>; pub type OnMessage = Box<dyn FnMut(&[u8]) + Send>;
@@ -115,9 +115,9 @@ pub type OnMessage = Box<dyn FnMut(&[u8]) + Send>;
/// ///
/// There are no ordering guarantees about these /// There are no ordering guarantees about these
pub trait ExtensionPort: Send + Sync { pub trait ExtensionPort: Send + Sync {
fn set_onmessage(&self, callback: OnMessage); fn set_onmessage(&self, callback: OnMessage);
fn send(&self, msg: &[u8]); fn send(&self, msg: &[u8]);
fn header(&self) -> &api::ExtensionHeader; fn header(&self) -> &api::ExtensionHeader;
} }
/// Data held about an Extension. This is refcounted within [Extension]. It's /// Data held about an Extension. This is refcounted within [Extension]. It's
@@ -126,323 +126,323 @@ pub trait ExtensionPort: Send + Sync {
/// upgrading fails. /// upgrading fails.
#[derive(destructure)] #[derive(destructure)]
pub struct ExtensionData { pub struct ExtensionData {
port: Arc<dyn ExtensionPort>, port: Arc<dyn ExtensionPort>,
// child: Mutex<process::Child>, // child: Mutex<process::Child>,
// child_stdin: Mutex<ChildStdin>, // child_stdin: Mutex<ChildStdin>,
reqnot: ReqNot<api::HostMsgSet>, reqnot: ReqNot<api::HostMsgSet>,
systems: Vec<SystemCtor>, systems: Vec<SystemCtor>,
logger: Logger, logger: Logger,
} }
impl Drop for ExtensionData { impl Drop for ExtensionData {
fn drop(&mut self) { self.reqnot.notify(api::HostExtNotif::Exit); } fn drop(&mut self) { self.reqnot.notify(api::HostExtNotif::Exit); }
} }
fn acq_expr(sys: api::SysId, extk: api::ExprTicket) { fn acq_expr(sys: api::SysId, extk: api::ExprTicket) {
(System::resolve(sys).expect("Expr acq'd by invalid system")) (System::resolve(sys).expect("Expr acq'd by invalid system"))
.give_expr(extk, || Expr::resolve(extk).expect("Invalid expr acq'd")); .give_expr(extk, || Expr::resolve(extk).expect("Invalid expr acq'd"));
} }
fn rel_expr(sys: api::SysId, extk: api::ExprTicket) { fn rel_expr(sys: api::SysId, extk: api::ExprTicket) {
let sys = System::resolve(sys).unwrap(); let sys = System::resolve(sys).unwrap();
let mut exprs = sys.0.exprs.write().unwrap(); let mut exprs = sys.0.exprs.write().unwrap();
exprs.entry(extk).and_replace_entry_with(|_, (rc, rt)| { exprs.entry(extk).and_replace_entry_with(|_, (rc, rt)| {
(0 < rc.fetch_sub(1, Ordering::Relaxed)).then_some((rc, rt)) (0 < rc.fetch_sub(1, Ordering::Relaxed)).then_some((rc, rt))
}); });
} }
#[derive(Clone)] #[derive(Clone)]
pub struct Extension(Arc<ExtensionData>); pub struct Extension(Arc<ExtensionData>);
impl Extension { impl Extension {
pub fn new_process(port: Arc<dyn ExtensionPort>, logger: Logger) -> io::Result<Self> { pub fn new_process(port: Arc<dyn ExtensionPort>, logger: Logger) -> io::Result<Self> {
let eh = port.header(); let eh = port.header();
let ret = Arc::new_cyclic(|weak: &Weak<ExtensionData>| ExtensionData { let ret = Arc::new_cyclic(|weak: &Weak<ExtensionData>| ExtensionData {
systems: (eh.systems.iter().cloned()) systems: (eh.systems.iter().cloned())
.map(|decl| SystemCtor { decl, ext: weak.clone() }) .map(|decl| SystemCtor { decl, ext: weak.clone() })
.collect(), .collect(),
logger, logger,
port: port.clone(), port: port.clone(),
reqnot: ReqNot::new( reqnot: ReqNot::new(
clone!(weak; move |sfn, _| { clone!(weak; move |sfn, _| {
let data = weak.upgrade().unwrap(); let data = weak.upgrade().unwrap();
data.logger.log_buf("Downsending", sfn); data.logger.log_buf("Downsending", sfn);
data.port.send(sfn); data.port.send(sfn);
}), }),
clone!(weak; move |notif, _| match notif { clone!(weak; move |notif, _| match notif {
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => acq_expr(acq.0, acq.1), api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => acq_expr(acq.0, acq.1),
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => rel_expr(rel.0, rel.1), api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => rel_expr(rel.0, rel.1),
api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => { api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => {
acq_expr(mov.inc, mov.expr); acq_expr(mov.inc, mov.expr);
rel_expr(mov.dec, mov.expr); rel_expr(mov.dec, mov.expr);
}, },
api::ExtHostNotif::Log(api::Log(str)) => weak.upgrade().unwrap().logger.log(str), api::ExtHostNotif::Log(api::Log(str)) => weak.upgrade().unwrap().logger.log(str),
}), }),
|hand, req| match req { |hand, req| match req {
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()), api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()),
api::ExtHostReq::IntReq(intreq) => match intreq { api::ExtHostReq::IntReq(intreq) => match intreq {
api::IntReq::InternStr(s) => hand.handle(&s, &intern(&**s.0).to_api()), api::IntReq::InternStr(s) => hand.handle(&s, &intern(&**s.0).to_api()),
api::IntReq::InternStrv(v) => hand.handle(&v, &intern(&*v.0).to_api()), api::IntReq::InternStrv(v) => hand.handle(&v, &intern(&*v.0).to_api()),
api::IntReq::ExternStr(si) => hand.handle(&si, &Tok::<String>::from_api(si.0).arc()), api::IntReq::ExternStr(si) => hand.handle(&si, &Tok::<String>::from_api(si.0).arc()),
api::IntReq::ExternStrv(vi) => hand.handle( api::IntReq::ExternStrv(vi) => hand.handle(
&vi, &vi,
&Arc::new( &Arc::new(
Tok::<Vec<Tok<String>>>::from_api(vi.0).iter().map(|t| t.to_api()).collect_vec(), Tok::<Vec<Tok<String>>>::from_api(vi.0).iter().map(|t| t.to_api()).collect_vec(),
), ),
), ),
}, },
api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => { api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => {
let sys = System::resolve(atom.owner).unwrap(); let sys = System::resolve(atom.owner).unwrap();
hand.handle(fw, &sys.reqnot().request(api::Fwded(fw.0.clone(), *key, body.clone()))) hand.handle(fw, &sys.reqnot().request(api::Fwded(fw.0.clone(), *key, body.clone())))
}, },
api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => { api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => {
let sys = System::resolve(id).unwrap(); let sys = System::resolve(id).unwrap();
hand.handle(fw, &sys.request(body.clone())) hand.handle(fw, &sys.request(body.clone()))
}, },
api::ExtHostReq::SubLex(sl) => { api::ExtHostReq::SubLex(sl) => {
let (rep_in, rep_out) = sync_channel(0); let (rep_in, rep_out) = sync_channel(0);
let lex_g = LEX_RECUR.lock().unwrap(); let lex_g = LEX_RECUR.lock().unwrap();
let req_in = lex_g.get(&sl.id).expect("Sublex for nonexistent lexid"); let req_in = lex_g.get(&sl.id).expect("Sublex for nonexistent lexid");
req_in.send(ReqPair(sl.clone(), rep_in)).unwrap(); req_in.send(ReqPair(sl.clone(), rep_in)).unwrap();
hand.handle(&sl, &rep_out.recv().unwrap()) hand.handle(&sl, &rep_out.recv().unwrap())
}, },
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(ins @ api::Inspect { target })) => { api::ExtHostReq::ExprReq(api::ExprReq::Inspect(ins @ api::Inspect { target })) => {
let expr = Expr::resolve(target).expect("Invalid ticket"); let expr = Expr::resolve(target).expect("Invalid ticket");
hand.handle(&ins, &api::Inspected { hand.handle(&ins, &api::Inspected {
refcount: expr.strong_count() as u32, refcount: expr.strong_count() as u32,
location: expr.pos().to_api(), location: expr.pos().to_api(),
kind: expr.to_api(), kind: expr.to_api(),
}) })
}, },
api::ExtHostReq::RunMacros(ref rm @ api::RunMacros { ref run_id, ref query }) => hand api::ExtHostReq::RunMacros(ref rm @ api::RunMacros { ref run_id, ref query }) => hand
.handle( .handle(
rm, rm,
&macro_recur( &macro_recur(
*run_id, *run_id,
mtreev_from_api(query, &mut |_| panic!("Recursion never contains atoms")), mtreev_from_api(query, &mut |_| panic!("Recursion never contains atoms")),
) )
.map(|x| macro_treev_to_api(*run_id, x)), .map(|x| macro_treev_to_api(*run_id, x)),
), ),
}, },
), ),
}); });
let weak = Arc::downgrade(&ret); let weak = Arc::downgrade(&ret);
port.set_onmessage(Box::new(move |msg| { port.set_onmessage(Box::new(move |msg| {
if let Some(xd) = weak.upgrade() { if let Some(xd) = weak.upgrade() {
xd.reqnot.receive(msg) xd.reqnot.receive(msg)
} }
})); }));
Ok(Self(ret)) Ok(Self(ret))
} }
pub fn systems(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() } pub fn systems(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
} }
pub struct SystemCtor { pub struct SystemCtor {
decl: api::SystemDecl, decl: api::SystemDecl,
ext: Weak<ExtensionData>, ext: Weak<ExtensionData>,
} }
impl SystemCtor { impl SystemCtor {
pub fn name(&self) -> &str { &self.decl.name } pub fn name(&self) -> &str { &self.decl.name }
pub fn priority(&self) -> NotNan<f64> { self.decl.priority } pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> { pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> {
self.decl.depends.iter().map(|s| &**s) self.decl.depends.iter().map(|s| &**s)
} }
pub fn run<'a>(&self, depends: impl IntoIterator<Item = &'a System>) -> System { pub fn run<'a>(&self, depends: impl IntoIterator<Item = &'a System>) -> System {
let mut inst_g = SYSTEM_INSTS.write().unwrap(); let mut inst_g = SYSTEM_INSTS.write().unwrap();
let depends = depends.into_iter().map(|si| si.id()).collect_vec(); let depends = depends.into_iter().map(|si| si.id()).collect_vec();
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided"); debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension"); let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
static NEXT_ID: AtomicU16 = AtomicU16::new(1); static NEXT_ID: AtomicU16 = AtomicU16::new(1);
let id = let id =
api::SysId(NonZero::new(NEXT_ID.fetch_add(1, Ordering::Relaxed)).expect("next_id wrapped")); api::SysId(NonZero::new(NEXT_ID.fetch_add(1, Ordering::Relaxed)).expect("next_id wrapped"));
let sys_inst = ext.reqnot.request(api::NewSystem { depends, id, system: self.decl.id }); let sys_inst = ext.reqnot.request(api::NewSystem { depends, id, system: self.decl.id });
let data = System(Arc::new(SystemInstData { let data = System(Arc::new(SystemInstData {
decl_id: self.decl.id, decl_id: self.decl.id,
ext: Extension(ext), ext: Extension(ext),
exprs: RwLock::default(), exprs: RwLock::default(),
lex_filter: sys_inst.lex_filter, lex_filter: sys_inst.lex_filter,
const_root: OnceLock::new(), const_root: OnceLock::new(),
line_types: sys_inst.line_types.into_iter().map(Tok::from_api).collect(), line_types: sys_inst.line_types.into_iter().map(Tok::from_api).collect(),
id, id,
})); }));
let root = (sys_inst.const_root.into_iter()) let root = (sys_inst.const_root.into_iter())
.map(|(k, v)| { .map(|(k, v)| {
Member::from_api( Member::from_api(
api::Member { name: k, kind: v }, api::Member { name: k, kind: v },
Substack::Bottom.push(Tok::from_api(k)), Substack::Bottom.push(Tok::from_api(k)),
&data, &data,
) )
}) })
.collect_vec(); .collect_vec();
data.0.const_root.set(root).unwrap(); data.0.const_root.set(root).unwrap();
inst_g.insert(id, data.clone()); inst_g.insert(id, data.clone());
data data
} }
} }
lazy_static! { lazy_static! {
static ref SYSTEM_INSTS: RwLock<HashMap<api::SysId, System>> = RwLock::default(); static ref SYSTEM_INSTS: RwLock<HashMap<api::SysId, System>> = RwLock::default();
static ref LEX_RECUR: Mutex<HashMap<api::ParsId, SyncSender<ReqPair<api::SubLex>>>> = static ref LEX_RECUR: Mutex<HashMap<api::ParsId, SyncSender<ReqPair<api::SubLex>>>> =
Mutex::default(); Mutex::default();
} }
pub struct ReqPair<R: Request>(R, pub SyncSender<R::Response>); pub struct ReqPair<R: Request>(R, pub SyncSender<R::Response>);
#[derive(destructure)] #[derive(destructure)]
pub struct SystemInstData { pub struct SystemInstData {
exprs: RwLock<HashMap<api::ExprTicket, (AtomicU32, Expr)>>, exprs: RwLock<HashMap<api::ExprTicket, (AtomicU32, Expr)>>,
ext: Extension, ext: Extension,
decl_id: api::SysDeclId, decl_id: api::SysDeclId,
lex_filter: api::CharFilter, lex_filter: api::CharFilter,
id: api::SysId, id: api::SysId,
const_root: OnceLock<Vec<Member>>, const_root: OnceLock<Vec<Member>>,
line_types: Vec<Tok<String>>, line_types: Vec<Tok<String>>,
} }
impl Drop for SystemInstData { impl Drop for SystemInstData {
fn drop(&mut self) { fn drop(&mut self) {
self.ext.0.reqnot.notify(api::SystemDrop(self.id)); self.ext.0.reqnot.notify(api::SystemDrop(self.id));
if let Ok(mut g) = SYSTEM_INSTS.write() { if let Ok(mut g) = SYSTEM_INSTS.write() {
g.remove(&self.id); g.remove(&self.id);
} }
} }
} }
#[derive(Clone)] #[derive(Clone)]
pub struct System(Arc<SystemInstData>); pub struct System(Arc<SystemInstData>);
impl System { impl System {
pub fn id(&self) -> api::SysId { self.id } pub fn id(&self) -> api::SysId { self.id }
fn resolve(id: api::SysId) -> Option<System> { SYSTEM_INSTS.read().unwrap().get(&id).cloned() } fn resolve(id: api::SysId) -> Option<System> { SYSTEM_INSTS.read().unwrap().get(&id).cloned() }
fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.ext.0.reqnot } fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.ext.0.reqnot }
fn give_expr(&self, ticket: api::ExprTicket, get_expr: impl FnOnce() -> Expr) -> api::ExprTicket { fn give_expr(&self, ticket: api::ExprTicket, get_expr: impl FnOnce() -> Expr) -> api::ExprTicket {
match self.0.exprs.write().unwrap().entry(ticket) { match self.0.exprs.write().unwrap().entry(ticket) {
Entry::Occupied(mut oe) => { Entry::Occupied(mut oe) => {
oe.get_mut().0.fetch_add(1, Ordering::Relaxed); oe.get_mut().0.fetch_add(1, Ordering::Relaxed);
}, },
Entry::Vacant(v) => { Entry::Vacant(v) => {
v.insert((AtomicU32::new(1), get_expr())); v.insert((AtomicU32::new(1), get_expr()));
}, },
} }
ticket ticket
} }
pub fn get_tree(&self, id: api::TreeId) -> api::MemberKind { pub fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
self.reqnot().request(api::GetMember(self.0.id, id)) self.reqnot().request(api::GetMember(self.0.id, id))
} }
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() } pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) } pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
/// Have this system lex a part of the source. It is assumed that /// Have this system lex a part of the source. It is assumed that
/// [Self::can_lex] was called and returned true. /// [Self::can_lex] was called and returned true.
pub fn lex( pub fn lex(
&self, &self,
source: Tok<String>, source: Tok<String>,
pos: u32, pos: u32,
mut r: impl FnMut(u32) -> Option<api::SubLexed> + Send, mut r: impl FnMut(u32) -> Option<api::SubLexed> + Send,
) -> api::OrcResult<Option<api::LexedExpr>> { ) -> api::OrcResult<Option<api::LexedExpr>> {
// get unique lex ID // get unique lex ID
static LEX_ID: AtomicU64 = AtomicU64::new(1); static LEX_ID: AtomicU64 = AtomicU64::new(1);
let id = api::ParsId(NonZero::new(LEX_ID.fetch_add(1, Ordering::Relaxed)).unwrap()); let id = api::ParsId(NonZero::new(LEX_ID.fetch_add(1, Ordering::Relaxed)).unwrap());
thread::scope(|s| { thread::scope(|s| {
// create and register channel // create and register channel
let (req_in, req_out) = sync_channel(0); let (req_in, req_out) = sync_channel(0);
LEX_RECUR.lock().unwrap().insert(id, req_in); // LEX_RECUR released LEX_RECUR.lock().unwrap().insert(id, req_in); // LEX_RECUR released
// spawn recursion handler which will exit when the sender is collected // spawn recursion handler which will exit when the sender is collected
s.spawn(move || { s.spawn(move || {
while let Ok(ReqPair(sublex, rep_in)) = req_out.recv() { while let Ok(ReqPair(sublex, rep_in)) = req_out.recv() {
rep_in.send(r(sublex.pos)).unwrap() rep_in.send(r(sublex.pos)).unwrap()
} }
}); });
// Pass control to extension // Pass control to extension
let ret = let ret =
self.reqnot().request(api::LexExpr { id, pos, sys: self.id(), text: source.to_api() }); self.reqnot().request(api::LexExpr { id, pos, sys: self.id(), text: source.to_api() });
// collect sender to unblock recursion handler thread before returning // collect sender to unblock recursion handler thread before returning
LEX_RECUR.lock().unwrap().remove(&id); LEX_RECUR.lock().unwrap().remove(&id);
ret.transpose() ret.transpose()
}) // exit recursion handler thread }) // exit recursion handler thread
} }
pub fn can_parse(&self, line_type: Tok<String>) -> bool { self.line_types.contains(&line_type) } pub fn can_parse(&self, line_type: Tok<String>) -> bool { self.line_types.contains(&line_type) }
pub fn line_types(&self) -> impl Iterator<Item = Tok<String>> + '_ { pub fn line_types(&self) -> impl Iterator<Item = Tok<String>> + '_ {
self.line_types.iter().cloned() self.line_types.iter().cloned()
} }
pub fn parse( pub fn parse(
&self, &self,
line: Vec<ParsTokTree>, line: Vec<ParsTokTree>,
exported: bool, exported: bool,
comments: Vec<Comment>, comments: Vec<Comment>,
) -> OrcRes<Vec<ParsTokTree>> { ) -> OrcRes<Vec<ParsTokTree>> {
let line = line.iter().map(|t| t.to_api(&mut |n, _| match *n {})).collect_vec(); let line = line.iter().map(|t| t.to_api(&mut |n, _| match *n {})).collect_vec();
let comments = comments.iter().map(Comment::to_api).collect_vec(); let comments = comments.iter().map(Comment::to_api).collect_vec();
let parsed = let parsed =
(self.reqnot().request(api::ParseLine { exported, sys: self.id(), comments, line })) (self.reqnot().request(api::ParseLine { exported, sys: self.id(), comments, line }))
.map_err(|e| OrcErrv::from_api(&e))?; .map_err(|e| OrcErrv::from_api(&e))?;
Ok(ttv_from_api(parsed, &mut ())) Ok(ttv_from_api(parsed, &mut ()))
} }
pub fn request(&self, req: Vec<u8>) -> Vec<u8> { pub fn request(&self, req: Vec<u8>) -> Vec<u8> {
self.reqnot().request(api::SysFwded(self.id(), req)) self.reqnot().request(api::SysFwded(self.id(), req))
} }
} }
impl fmt::Debug for System { impl fmt::Debug for System {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let ctor = (self.0.ext.0.systems.iter().find(|c| c.decl.id == self.0.decl_id)) let ctor = (self.0.ext.0.systems.iter().find(|c| c.decl.id == self.0.decl_id))
.expect("System instance with no associated constructor"); .expect("System instance with no associated constructor");
write!(f, "System({} @ {} #{}, ", ctor.decl.name, ctor.decl.priority, self.0.id.0)?; write!(f, "System({} @ {} #{}, ", ctor.decl.name, ctor.decl.priority, self.0.id.0)?;
match self.0.exprs.read() { match self.0.exprs.read() {
Err(_) => write!(f, "expressions unavailable"), Err(_) => write!(f, "expressions unavailable"),
Ok(r) => { Ok(r) => {
let rc: u32 = r.values().map(|v| v.0.load(Ordering::Relaxed)).sum(); let rc: u32 = r.values().map(|v| v.0.load(Ordering::Relaxed)).sum();
write!(f, "{rc} refs to {} exprs", r.len()) write!(f, "{rc} refs to {} exprs", r.len())
}, },
} }
} }
} }
impl Deref for System { impl Deref for System {
type Target = SystemInstData; type Target = SystemInstData;
fn deref(&self) -> &Self::Target { self.0.as_ref() } fn deref(&self) -> &Self::Target { self.0.as_ref() }
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum SysResolvErr { pub enum SysResolvErr {
Loop(Vec<String>), Loop(Vec<String>),
Missing(String), Missing(String),
} }
pub fn init_systems(tgts: &[String], exts: &[Extension]) -> Result<Vec<System>, SysResolvErr> { pub fn init_systems(tgts: &[String], exts: &[Extension]) -> Result<Vec<System>, SysResolvErr> {
let mut to_load = HashMap::<&str, &SystemCtor>::new(); let mut to_load = HashMap::<&str, &SystemCtor>::new();
let mut to_find = tgts.iter().map(|s| s.as_str()).collect::<VecDeque<&str>>(); let mut to_find = tgts.iter().map(|s| s.as_str()).collect::<VecDeque<&str>>();
while let Some(target) = to_find.pop_front() { while let Some(target) = to_find.pop_front() {
if to_load.contains_key(target) { if to_load.contains_key(target) {
continue; continue;
} }
let ctor = (exts.iter()) let ctor = (exts.iter())
.flat_map(|e| e.systems().filter(|c| c.decl.name == target)) .flat_map(|e| e.systems().filter(|c| c.decl.name == target))
.max_by_key(|c| c.decl.priority) .max_by_key(|c| c.decl.priority)
.ok_or_else(|| SysResolvErr::Missing(target.to_string()))?; .ok_or_else(|| SysResolvErr::Missing(target.to_string()))?;
to_load.insert(target, ctor); to_load.insert(target, ctor);
to_find.extend(ctor.decl.depends.iter().map(|s| s.as_str())); to_find.extend(ctor.decl.depends.iter().map(|s| s.as_str()));
} }
let mut to_load_ordered = Vec::new(); let mut to_load_ordered = Vec::new();
fn walk_deps<'a>( fn walk_deps<'a>(
graph: &mut HashMap<&str, &'a SystemCtor>, graph: &mut HashMap<&str, &'a SystemCtor>,
list: &mut Vec<&'a SystemCtor>, list: &mut Vec<&'a SystemCtor>,
chain: Stackframe<&str>, chain: Stackframe<&str>,
) -> Result<(), SysResolvErr> { ) -> Result<(), SysResolvErr> {
if let Some(ctor) = graph.remove(chain.item) { if let Some(ctor) = graph.remove(chain.item) {
// if the above is none, the system is already queued. Missing systems are // if the above is none, the system is already queued. Missing systems are
// detected above // detected above
for dep in ctor.decl.depends.iter() { for dep in ctor.decl.depends.iter() {
if Substack::Frame(chain).iter().any(|c| c == dep) { if Substack::Frame(chain).iter().any(|c| c == dep) {
let mut circle = vec![dep.to_string()]; let mut circle = vec![dep.to_string()];
circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string())); circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
return Err(SysResolvErr::Loop(circle)); return Err(SysResolvErr::Loop(circle));
} }
walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))? walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))?
} }
list.push(ctor); list.push(ctor);
} }
Ok(()) Ok(())
} }
for tgt in tgts { for tgt in tgts {
walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?; walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?;
} }
let mut systems = HashMap::<&str, System>::new(); let mut systems = HashMap::<&str, System>::new();
for ctor in to_load_ordered.iter() { for ctor in to_load_ordered.iter() {
let sys = ctor.run(ctor.depends().map(|n| &systems[n])); let sys = ctor.run(ctor.depends().map(|n| &systems[n]));
systems.insert(ctor.name(), sys); systems.insert(ctor.name(), sys);
} }
Ok(systems.into_values().collect_vec()) Ok(systems.into_values().collect_vec())
} }

View File

@@ -2,208 +2,208 @@ use std::num::NonZeroU64;
use std::sync::Arc; use std::sync::Arc;
use hashbrown::HashMap; use hashbrown::HashMap;
use orchid_base::error::{mk_errv, OrcErrv, OrcRes}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::{intern, match_mapping}; use orchid_base::interner::{Tok, intern};
use orchid_base::interner::{intern, Tok};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::number::{num_to_err, parse_num}; use orchid_base::number::{num_to_err, parse_num};
use orchid_base::parse::{name_char, name_start, op_char, unrep_space}; use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
use orchid_base::tokens::PARENS; use orchid_base::tokens::PARENS;
use orchid_base::tree::Ph; use orchid_base::tree::Ph;
use orchid_base::{intern, match_mapping};
use crate::api; use crate::api;
use crate::extension::{AtomHand, System}; use crate::extension::{AtomHand, System};
use crate::tree::{ParsTok, ParsTokTree}; use crate::tree::{ParsTok, ParsTokTree};
pub struct LexCtx<'a> { pub struct LexCtx<'a> {
pub systems: &'a [System], pub systems: &'a [System],
pub source: &'a Tok<String>, pub source: &'a Tok<String>,
pub tail: &'a str, pub tail: &'a str,
pub sub_trees: &'a mut HashMap<api::TreeTicket, ParsTokTree>, pub sub_trees: &'a mut HashMap<api::TreeTicket, ParsTokTree>,
} }
impl<'a> LexCtx<'a> { impl<'a> LexCtx<'a> {
pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b> pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b>
where 'a: 'b { where 'a: 'b {
LexCtx { LexCtx {
source: self.source, source: self.source,
tail: &self.source[pos as usize..], tail: &self.source[pos as usize..],
systems: self.systems, systems: self.systems,
sub_trees: &mut *self.sub_trees, sub_trees: &mut *self.sub_trees,
} }
} }
pub fn get_pos(&self) -> u32 { self.end_pos() - self.tail.len() as u32 } pub fn get_pos(&self) -> u32 { self.end_pos() - self.tail.len() as u32 }
pub fn end_pos(&self) -> u32 { self.source.len() as u32 } pub fn end_pos(&self) -> u32 { self.source.len() as u32 }
pub fn set_pos(&mut self, pos: u32) { self.tail = &self.source[pos as usize..] } pub fn set_pos(&mut self, pos: u32) { self.tail = &self.source[pos as usize..] }
pub fn push_pos(&mut self, delta: u32) { self.set_pos(self.get_pos() + delta) } pub fn push_pos(&mut self, delta: u32) { self.set_pos(self.get_pos() + delta) }
pub fn set_tail(&mut self, tail: &'a str) { self.tail = tail } pub fn set_tail(&mut self, tail: &'a str) { self.tail = tail }
pub fn strip_prefix(&mut self, tgt: &str) -> bool { pub fn strip_prefix(&mut self, tgt: &str) -> bool {
if let Some(src) = self.tail.strip_prefix(tgt) { if let Some(src) = self.tail.strip_prefix(tgt) {
self.tail = src; self.tail = src;
return true; return true;
} }
false false
} }
pub fn add_subtree(&mut self, subtree: ParsTokTree) -> api::TreeTicket { pub fn add_subtree(&mut self, subtree: ParsTokTree) -> api::TreeTicket {
let next_idx = api::TreeTicket(NonZeroU64::new(self.sub_trees.len() as u64 + 1).unwrap()); let next_idx = api::TreeTicket(NonZeroU64::new(self.sub_trees.len() as u64 + 1).unwrap());
self.sub_trees.insert(next_idx, subtree); self.sub_trees.insert(next_idx, subtree);
next_idx next_idx
} }
pub fn rm_subtree(&mut self, ticket: api::TreeTicket) -> ParsTokTree { pub fn rm_subtree(&mut self, ticket: api::TreeTicket) -> ParsTokTree {
self.sub_trees.remove(&ticket).unwrap() self.sub_trees.remove(&ticket).unwrap()
} }
pub fn strip_char(&mut self, tgt: char) -> bool { pub fn strip_char(&mut self, tgt: char) -> bool {
if let Some(src) = self.tail.strip_prefix(tgt) { if let Some(src) = self.tail.strip_prefix(tgt) {
self.tail = src; self.tail = src;
return true; return true;
} }
false false
} }
pub fn trim(&mut self, filter: impl Fn(char) -> bool) { pub fn trim(&mut self, filter: impl Fn(char) -> bool) {
self.tail = self.tail.trim_start_matches(filter); self.tail = self.tail.trim_start_matches(filter);
} }
pub fn trim_ws(&mut self) { self.trim(|c| c.is_whitespace() && !"\r\n".contains(c)) } pub fn trim_ws(&mut self) { self.trim(|c| c.is_whitespace() && !"\r\n".contains(c)) }
pub fn get_start_matches(&mut self, filter: impl Fn(char) -> bool) -> &'a str { pub fn get_start_matches(&mut self, filter: impl Fn(char) -> bool) -> &'a str {
let rest = self.tail.trim_start_matches(filter); let rest = self.tail.trim_start_matches(filter);
let matches = &self.tail[..self.tail.len() - rest.len()]; let matches = &self.tail[..self.tail.len() - rest.len()];
self.tail = rest; self.tail = rest;
matches matches
} }
} }
pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> { pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
let start = ctx.get_pos(); let start = ctx.get_pos();
assert!( assert!(
!ctx.tail.is_empty() && !ctx.tail.starts_with(unrep_space), !ctx.tail.is_empty() && !ctx.tail.starts_with(unrep_space),
"Lexing empty string or whitespace to token!\n\ "Lexing empty string or whitespace to token!\n\
Invocations of lex_tok should check for empty string" Invocations of lex_tok should check for empty string"
); );
let tok = if ctx.strip_prefix("\r\n") || ctx.strip_prefix("\r") || ctx.strip_prefix("\n") { let tok = if ctx.strip_prefix("\r\n") || ctx.strip_prefix("\r") || ctx.strip_prefix("\n") {
ParsTok::BR ParsTok::BR
} else if ctx.strip_prefix("::") { } else if ctx.strip_prefix("::") {
ParsTok::NS ParsTok::NS
} else if ctx.strip_prefix("--[") { } else if ctx.strip_prefix("--[") {
let (cmt, tail) = ctx.tail.split_once("]--").ok_or_else(|| { let (cmt, tail) = ctx.tail.split_once("]--").ok_or_else(|| {
mk_errv(intern!(str: "Unterminated block comment"), "This block comment has no ending ]--", [ mk_errv(intern!(str: "Unterminated block comment"), "This block comment has no ending ]--", [
Pos::Range(start..start + 3).into(), Pos::Range(start..start + 3).into(),
]) ])
})?; })?;
ctx.set_tail(tail); ctx.set_tail(tail);
ParsTok::Comment(Arc::new(cmt.to_string())) ParsTok::Comment(Arc::new(cmt.to_string()))
} else if let Some(tail) = ctx.tail.strip_prefix("--").filter(|t| !t.starts_with(op_char)) { } else if let Some(tail) = ctx.tail.strip_prefix("--").filter(|t| !t.starts_with(op_char)) {
let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1); let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1);
ctx.push_pos(end as u32); ctx.push_pos(end as u32);
ParsTok::Comment(Arc::new(tail[2..end].to_string())) ParsTok::Comment(Arc::new(tail[2..end].to_string()))
} else if ctx.strip_char('\\') { } else if ctx.strip_char('\\') {
let mut arg = Vec::new(); let mut arg = Vec::new();
ctx.trim_ws(); ctx.trim_ws();
while !ctx.strip_char('.') { while !ctx.strip_char('.') {
if ctx.tail.is_empty() { if ctx.tail.is_empty() {
return Err(mk_errv( return Err(mk_errv(
intern!(str: "Unclosed lambda"), intern!(str: "Unclosed lambda"),
"Lambdae started with \\ should separate arguments from body with .", "Lambdae started with \\ should separate arguments from body with .",
[Pos::Range(start..start + 1).into()], [Pos::Range(start..start + 1).into()],
)); ));
} }
arg.push(lex_once(ctx)?); arg.push(lex_once(ctx)?);
ctx.trim_ws(); ctx.trim_ws();
} }
ParsTok::LambdaHead(arg) ParsTok::LambdaHead(arg)
} else if let Some((lp, rp, paren)) = PARENS.iter().find(|(lp, ..)| ctx.strip_char(*lp)) { } else if let Some((lp, rp, paren)) = PARENS.iter().find(|(lp, ..)| ctx.strip_char(*lp)) {
let mut body = Vec::new(); let mut body = Vec::new();
ctx.trim_ws(); ctx.trim_ws();
while !ctx.strip_char(*rp) { while !ctx.strip_char(*rp) {
if ctx.tail.is_empty() { if ctx.tail.is_empty() {
return Err(mk_errv( return Err(mk_errv(
intern!(str: "unclosed paren"), intern!(str: "unclosed paren"),
format!("this {lp} has no matching {rp}"), format!("this {lp} has no matching {rp}"),
[Pos::Range(start..start + 1).into()], [Pos::Range(start..start + 1).into()],
)); ));
} }
body.push(lex_once(ctx)?); body.push(lex_once(ctx)?);
ctx.trim_ws(); ctx.trim_ws();
} }
ParsTok::S(*paren, body) ParsTok::S(*paren, body)
} else if ctx.strip_prefix("macro") && } else if ctx.strip_prefix("macro")
!ctx.tail.chars().next().is_some_and(|x| x.is_ascii_alphabetic()) && !ctx.tail.chars().next().is_some_and(|x| x.is_ascii_alphabetic())
{ {
ctx.strip_prefix("macro"); ctx.strip_prefix("macro");
if ctx.strip_char('(') { if ctx.strip_char('(') {
let pos = ctx.get_pos(); let pos = ctx.get_pos();
let numstr = ctx.get_start_matches(|x| x != ')').trim(); let numstr = ctx.get_start_matches(|x| x != ')').trim();
let num = parse_num(numstr).map_err(|e| num_to_err(e, pos))?; let num = parse_num(numstr).map_err(|e| num_to_err(e, pos))?;
ParsTok::Macro(Some(num.to_f64())) ParsTok::Macro(Some(num.to_f64()))
} else { } else {
ParsTok::Macro(None) ParsTok::Macro(None)
} }
} else { } else {
for sys in ctx.systems { for sys in ctx.systems {
let mut errors = Vec::new(); let mut errors = Vec::new();
if ctx.tail.starts_with(|c| sys.can_lex(c)) { if ctx.tail.starts_with(|c| sys.can_lex(c)) {
let lx = let lx =
sys.lex(ctx.source.clone(), ctx.get_pos(), |pos| match lex_once(&mut ctx.push(pos)) { sys.lex(ctx.source.clone(), ctx.get_pos(), |pos| match lex_once(&mut ctx.push(pos)) {
Ok(t) => Some(api::SubLexed { pos, ticket: ctx.add_subtree(t) }), Ok(t) => Some(api::SubLexed { pos, ticket: ctx.add_subtree(t) }),
Err(e) => { Err(e) => {
errors.push(e); errors.push(e);
None None
}, },
}); });
match lx { match lx {
Err(e) => return Err(errors.into_iter().fold(OrcErrv::from_api(&e), |a, b| a + b)), Err(e) => return Err(errors.into_iter().fold(OrcErrv::from_api(&e), |a, b| a + b)),
Ok(Some(lexed)) => return Ok(tt_to_owned(&lexed.expr, &mut ctx.push(lexed.pos))), Ok(Some(lexed)) => return Ok(tt_to_owned(&lexed.expr, &mut ctx.push(lexed.pos))),
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) { Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
Some(errors) => return Err(errors), Some(errors) => return Err(errors),
None => continue, None => continue,
}, },
} }
} }
} }
if ctx.tail.starts_with(name_start) { if ctx.tail.starts_with(name_start) {
ParsTok::Name(intern(ctx.get_start_matches(name_char))) ParsTok::Name(intern(ctx.get_start_matches(name_char)))
} else if ctx.tail.starts_with(op_char) { } else if ctx.tail.starts_with(op_char) {
ParsTok::Name(intern(ctx.get_start_matches(op_char))) ParsTok::Name(intern(ctx.get_start_matches(op_char)))
} else { } else {
return Err(mk_errv( return Err(mk_errv(
intern!(str: "Unrecognized character"), intern!(str: "Unrecognized character"),
"The following syntax is meaningless.", "The following syntax is meaningless.",
[Pos::Range(start..start + 1).into()], [Pos::Range(start..start + 1).into()],
)); ));
} }
}; };
Ok(ParsTokTree { tok, range: start..ctx.get_pos() }) Ok(ParsTokTree { tok, range: start..ctx.get_pos() })
} }
fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree { fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree {
let tok = match_mapping!(&api.token, api::Token => ParsTok { let tok = match_mapping!(&api.token, api::Token => ParsTok {
Atom(atom => AtomHand::from_api(atom.clone())), Atom(atom => AtomHand::from_api(atom.clone())),
Bottom(err => OrcErrv::from_api(err)), Bottom(err => OrcErrv::from_api(err)),
LambdaHead(arg => ttv_to_owned(arg, ctx)), LambdaHead(arg => ttv_to_owned(arg, ctx)),
Name(name => Tok::from_api(*name)), Name(name => Tok::from_api(*name)),
S(p.clone(), b.iter().map(|t| tt_to_owned(t, ctx)).collect()), S(p.clone(), b.iter().map(|t| tt_to_owned(t, ctx)).collect()),
BR, NS, BR, NS,
Comment(c.clone()), Comment(c.clone()),
Ph(ph => Ph::from_api(ph)), Ph(ph => Ph::from_api(ph)),
Macro(*prio), Macro(*prio),
} { } {
api::Token::Slot(id) => return ctx.rm_subtree(*id), api::Token::Slot(id) => return ctx.rm_subtree(*id),
}); });
ParsTokTree { range: api.range.clone(), tok } ParsTokTree { range: api.range.clone(), tok }
} }
fn ttv_to_owned<'a>( fn ttv_to_owned<'a>(
api: impl IntoIterator<Item = &'a api::TokenTree>, api: impl IntoIterator<Item = &'a api::TokenTree>,
ctx: &mut LexCtx<'_> ctx: &mut LexCtx<'_>,
) -> Vec<ParsTokTree> { ) -> Vec<ParsTokTree> {
api.into_iter().map(|t| tt_to_owned(t, ctx)).collect() api.into_iter().map(|t| tt_to_owned(t, ctx)).collect()
} }
pub fn lex(text: Tok<String>, systems: &[System]) -> OrcRes<Vec<ParsTokTree>> { pub fn lex(text: Tok<String>, systems: &[System]) -> OrcRes<Vec<ParsTokTree>> {
let mut sub_trees = HashMap::new(); let mut sub_trees = HashMap::new();
let mut ctx = LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems }; let mut ctx = LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems };
let mut tokv = Vec::new(); let mut tokv = Vec::new();
ctx.trim(unrep_space); ctx.trim(unrep_space);
while !ctx.tail.is_empty() { while !ctx.tail.is_empty() {
tokv.push(lex_once(&mut ctx)?); tokv.push(lex_once(&mut ctx)?);
ctx.trim(unrep_space); ctx.trim(unrep_space);
} }
Ok(tokv) Ok(tokv)
} }

View File

@@ -4,8 +4,8 @@ pub mod child;
pub mod expr; pub mod expr;
pub mod extension; pub mod extension;
pub mod lex; pub mod lex;
pub mod macros;
pub mod parse; pub mod parse;
pub mod rule;
pub mod subprocess; pub mod subprocess;
pub mod tree; pub mod tree;
pub mod macros;
pub mod rule;

View File

@@ -17,158 +17,158 @@ pub type MacTok = MTok<'static, AtomHand>;
pub type MacTree = MTree<'static, AtomHand>; pub type MacTree = MTree<'static, AtomHand>;
trait_set! { trait_set! {
trait MacroCB = Fn(Vec<MacTree>) -> Option<Vec<MacTree>> + Send + Sync; trait MacroCB = Fn(Vec<MacTree>) -> Option<Vec<MacTree>> + Send + Sync;
} }
lazy_static! { lazy_static! {
static ref RECURSION: RwLock<HashMap<api::ParsId, Box<dyn MacroCB>>> = RwLock::default(); static ref RECURSION: RwLock<HashMap<api::ParsId, Box<dyn MacroCB>>> = RwLock::default();
static ref MACRO_SLOTS: RwLock<HashMap<api::ParsId, HashMap<api::MacroTreeId, Arc<MacTok>>>> = static ref MACRO_SLOTS: RwLock<HashMap<api::ParsId, HashMap<api::MacroTreeId, Arc<MacTok>>>> =
RwLock::default(); RwLock::default();
} }
pub fn macro_recur(run_id: api::ParsId, input: Vec<MacTree>) -> Option<Vec<MacTree>> { pub fn macro_recur(run_id: api::ParsId, input: Vec<MacTree>) -> Option<Vec<MacTree>> {
(RECURSION.read().unwrap()[&run_id])(input) (RECURSION.read().unwrap()[&run_id])(input)
} }
pub fn macro_treev_to_api(run_id: api::ParsId, mtree: Vec<MacTree>) -> Vec<api::MacroTree> { pub fn macro_treev_to_api(run_id: api::ParsId, mtree: Vec<MacTree>) -> Vec<api::MacroTree> {
let mut g = MACRO_SLOTS.write().unwrap(); let mut g = MACRO_SLOTS.write().unwrap();
let run_cache = g.get_mut(&run_id).expect("Parser run not found"); let run_cache = g.get_mut(&run_id).expect("Parser run not found");
mtreev_to_api(&mtree, &mut |a: &AtomHand| { mtreev_to_api(&mtree, &mut |a: &AtomHand| {
let id = api::MacroTreeId((run_cache.len() as u64 + 1).try_into().unwrap()); let id = api::MacroTreeId((run_cache.len() as u64 + 1).try_into().unwrap());
run_cache.insert(id, Arc::new(MacTok::Atom(a.clone()))); run_cache.insert(id, Arc::new(MacTok::Atom(a.clone())));
api::MacroToken::Slot(id) api::MacroToken::Slot(id)
}) })
} }
pub fn macro_treev_from_api(api: Vec<api::MacroTree>) -> Vec<MacTree> { pub fn macro_treev_from_api(api: Vec<api::MacroTree>) -> Vec<MacTree> {
mtreev_from_api(&api, &mut |atom| MacTok::Atom(AtomHand::from_api(atom.clone()))) mtreev_from_api(&api, &mut |atom| MacTok::Atom(AtomHand::from_api(atom.clone())))
} }
pub fn deslot_macro(run_id: api::ParsId, tree: &[MacTree]) -> Option<Vec<MacTree>> { pub fn deslot_macro(run_id: api::ParsId, tree: &[MacTree]) -> Option<Vec<MacTree>> {
let mut slots = (MACRO_SLOTS.write().unwrap()).remove(&run_id).expect("Run not found"); let mut slots = (MACRO_SLOTS.write().unwrap()).remove(&run_id).expect("Run not found");
return work(&mut slots, tree); return work(&mut slots, tree);
fn work( fn work(
slots: &mut HashMap<api::MacroTreeId, Arc<MacTok>>, slots: &mut HashMap<api::MacroTreeId, Arc<MacTok>>,
tree: &[MacTree], tree: &[MacTree],
) -> Option<Vec<MacTree>> { ) -> Option<Vec<MacTree>> {
let items = (tree.iter()) let items = (tree.iter())
.map(|t| { .map(|t| {
Some(MacTree { Some(MacTree {
tok: match &*t.tok { tok: match &*t.tok {
MacTok::Atom(_) | MacTok::Name(_) | MacTok::Ph(_) => return None, MacTok::Atom(_) | MacTok::Name(_) | MacTok::Ph(_) => return None,
MacTok::Ref(_) => panic!("Ref is an extension-local optimization"), MacTok::Ref(_) => panic!("Ref is an extension-local optimization"),
MacTok::Done(_) => panic!("Created and removed by matcher"), MacTok::Done(_) => panic!("Created and removed by matcher"),
MacTok::Slot(slot) => slots.get(&slot.id()).expect("Slot not found").clone(), MacTok::Slot(slot) => slots.get(&slot.id()).expect("Slot not found").clone(),
MacTok::S(paren, b) => Arc::new(MacTok::S(*paren, work(slots, b)?)), MacTok::S(paren, b) => Arc::new(MacTok::S(*paren, work(slots, b)?)),
MacTok::Lambda(a, b) => Arc::new(match (work(slots, a), work(slots, b)) { MacTok::Lambda(a, b) => Arc::new(match (work(slots, a), work(slots, b)) {
(None, None) => return None, (None, None) => return None,
(Some(a), None) => MacTok::Lambda(a, b.clone()), (Some(a), None) => MacTok::Lambda(a, b.clone()),
(None, Some(b)) => MacTok::Lambda(a.clone(), b), (None, Some(b)) => MacTok::Lambda(a.clone(), b),
(Some(a), Some(b)) => MacTok::Lambda(a, b), (Some(a), Some(b)) => MacTok::Lambda(a, b),
}), }),
}, },
pos: t.pos.clone(), pos: t.pos.clone(),
}) })
}) })
.collect_vec(); .collect_vec();
let any_changed = items.iter().any(Option::is_some); let any_changed = items.iter().any(Option::is_some);
any_changed.then(|| { any_changed.then(|| {
(items.into_iter().enumerate()) (items.into_iter().enumerate())
.map(|(i, opt)| opt.unwrap_or_else(|| tree[i].clone())) .map(|(i, opt)| opt.unwrap_or_else(|| tree[i].clone()))
.collect_vec() .collect_vec()
}) })
} }
} }
pub struct Macro<Matcher> { pub struct Macro<Matcher> {
deps: HashSet<Sym>, deps: HashSet<Sym>,
cases: Vec<(Matcher, Code)>, cases: Vec<(Matcher, Code)>,
} }
pub struct MacroRepo { pub struct MacroRepo {
named: HashMap<Sym, Vec<Macro<NamedMatcher>>>, named: HashMap<Sym, Vec<Macro<NamedMatcher>>>,
prio: Vec<Macro<PriodMatcher>>, prio: Vec<Macro<PriodMatcher>>,
} }
impl MacroRepo { impl MacroRepo {
/// TODO: the recursion inside this function needs to be moved into Orchid. /// TODO: the recursion inside this function needs to be moved into Orchid.
/// See the markdown note /// See the markdown note
pub fn process_exprv(&self, target: &[MacTree]) -> Option<Vec<MacTree>> { pub fn process_exprv(&self, target: &[MacTree]) -> Option<Vec<MacTree>> {
let mut workcp = target.to_vec(); let mut workcp = target.to_vec();
let mut lexicon; let mut lexicon;
'try_named: loop { 'try_named: loop {
lexicon = HashSet::new(); lexicon = HashSet::new();
target.iter().for_each(|tgt| fill_lexicon(tgt, &mut lexicon)); target.iter().for_each(|tgt| fill_lexicon(tgt, &mut lexicon));
for (i, tree) in workcp.iter().enumerate() { for (i, tree) in workcp.iter().enumerate() {
let MacTok::Name(name) = &*tree.tok else { continue }; let MacTok::Name(name) = &*tree.tok else { continue };
let matches = (self.named.get(name).into_iter().flatten()) let matches = (self.named.get(name).into_iter().flatten())
.filter(|m| m.deps.is_subset(&lexicon)) .filter(|m| m.deps.is_subset(&lexicon))
.filter_map(|mac| { .filter_map(|mac| {
mac.cases.iter().find_map(|cas| cas.0.apply(&workcp[i..], |_| false).map(|s| (cas, s))) mac.cases.iter().find_map(|cas| cas.0.apply(&workcp[i..], |_| false).map(|s| (cas, s)))
}) })
.collect_vec(); .collect_vec();
assert!( assert!(
matches.len() < 2, matches.len() < 2,
"Multiple conflicting matches on {:?}: {:?}", "Multiple conflicting matches on {:?}: {:?}",
&workcp[i..], &workcp[i..],
matches matches
); );
let Some((case, (state, tail))) = matches.into_iter().next() else { continue }; let Some((case, (state, tail))) = matches.into_iter().next() else { continue };
let inj = (run_body(&case.1, state).into_iter()) let inj = (run_body(&case.1, state).into_iter())
.map(|MacTree { pos, tok }| MacTree { pos, tok: Arc::new(MacTok::Done(tok)) }); .map(|MacTree { pos, tok }| MacTree { pos, tok: Arc::new(MacTok::Done(tok)) });
workcp.splice(i..(workcp.len() - tail.len()), inj); workcp.splice(i..(workcp.len() - tail.len()), inj);
continue 'try_named; continue 'try_named;
} }
break; break;
} }
if let Some(((_, body), state)) = (self.prio.iter()) if let Some(((_, body), state)) = (self.prio.iter())
.filter(|mac| mac.deps.is_subset(&lexicon)) .filter(|mac| mac.deps.is_subset(&lexicon))
.flat_map(|mac| &mac.cases) .flat_map(|mac| &mac.cases)
.find_map(|case| case.0.apply(&workcp, |_| false).map(|state| (case, state))) .find_map(|case| case.0.apply(&workcp, |_| false).map(|state| (case, state)))
{ {
return Some(run_body(body, state)); return Some(run_body(body, state));
} }
let results = (workcp.into_iter()) let results = (workcp.into_iter())
.map(|mt| match &*mt.tok { .map(|mt| match &*mt.tok {
MTok::S(p, body) => self.process_exprv(body).map(|body| MTok::S(*p, body).at(mt.pos)), MTok::S(p, body) => self.process_exprv(body).map(|body| MTok::S(*p, body).at(mt.pos)),
MTok::Lambda(arg, body) => match (self.process_exprv(arg), self.process_exprv(body)) { MTok::Lambda(arg, body) => match (self.process_exprv(arg), self.process_exprv(body)) {
(Some(arg), Some(body)) => Some(MTok::Lambda(arg, body).at(mt.pos)), (Some(arg), Some(body)) => Some(MTok::Lambda(arg, body).at(mt.pos)),
(Some(arg), None) => Some(MTok::Lambda(arg, body.to_vec()).at(mt.pos)), (Some(arg), None) => Some(MTok::Lambda(arg, body.to_vec()).at(mt.pos)),
(None, Some(body)) => Some(MTok::Lambda(arg.to_vec(), body).at(mt.pos)), (None, Some(body)) => Some(MTok::Lambda(arg.to_vec(), body).at(mt.pos)),
(None, None) => None, (None, None) => None,
}, },
_ => None, _ => None,
}) })
.collect_vec(); .collect_vec();
results.iter().any(Option::is_some).then(|| { results.iter().any(Option::is_some).then(|| {
(results.into_iter().zip(target)) (results.into_iter().zip(target))
.map(|(opt, fb)| opt.unwrap_or_else(|| fb.clone())) .map(|(opt, fb)| opt.unwrap_or_else(|| fb.clone()))
.collect_vec() .collect_vec()
}) })
} }
} }
fn fill_lexicon(tgt: &MacTree, lexicon: &mut HashSet<Sym>) { fn fill_lexicon(tgt: &MacTree, lexicon: &mut HashSet<Sym>) {
match &*tgt.tok { match &*tgt.tok {
MTok::Name(n) => { MTok::Name(n) => {
lexicon.insert(n.clone()); lexicon.insert(n.clone());
}, },
MTok::Lambda(arg, body) => { MTok::Lambda(arg, body) => {
arg.iter().for_each(|t| fill_lexicon(t, lexicon)); arg.iter().for_each(|t| fill_lexicon(t, lexicon));
body.iter().for_each(|t| fill_lexicon(t, lexicon)) body.iter().for_each(|t| fill_lexicon(t, lexicon))
}, },
MTok::S(_, body) => body.iter().for_each(|t| fill_lexicon(t, lexicon)), MTok::S(_, body) => body.iter().for_each(|t| fill_lexicon(t, lexicon)),
_ => (), _ => (),
} }
} }
fn run_body(body: &Code, mut state: MatchState<'_>) -> Vec<MacTree> { fn run_body(body: &Code, mut state: MatchState<'_>) -> Vec<MacTree> {
let inject: Vec<MacTree> = todo!("Call the interpreter with bindings"); let inject: Vec<MacTree> = todo!("Call the interpreter with bindings");
inject inject
.into_iter() .into_iter()
.map(|MTree { pos, tok }| MTree { pos, tok: Arc::new(MTok::Done(tok)) }) .map(|MTree { pos, tok }| MTree { pos, tok: Arc::new(MTok::Done(tok)) })
.collect_vec() .collect_vec()
} }

View File

@@ -3,279 +3,295 @@ use std::{iter, thread};
use itertools::Itertools; use itertools::Itertools;
use never::Never; use never::Never;
use orchid_base::error::{mk_err, mk_errv, OrcErrv, OrcRes, Reporter}; use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_err, mk_errv};
use orchid_base::intern; use orchid_base::intern;
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::macros::{MTok, MTree}; use orchid_base::macros::{MTok, MTree};
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::{ use orchid_base::parse::{
expect_end, line_items, parse_multiname, strip_fluff, try_pop_no_fluff, Comment, Import, Comment, Import, Parsed, Snippet, expect_end, line_items, parse_multiname, strip_fluff,
Parsed, Snippet, try_pop_no_fluff,
}; };
use orchid_base::tree::{Paren, TokTree, Token}; use orchid_base::tree::{Paren, TokTree, Token};
use substack::Substack; use substack::Substack;
use crate::extension::{AtomHand, System}; use crate::extension::{AtomHand, System};
use crate::macros::MacTree; use crate::macros::MacTree;
use crate::tree::{Code, CodeLocator, Item, ItemKind, Member, MemberKind, Module, ParsTokTree, Rule, RuleKind}; use crate::tree::{
Code, CodeLocator, Item, ItemKind, Member, MemberKind, Module, ParsTokTree, Rule, RuleKind,
};
type ParsSnippet<'a> = Snippet<'a, 'static, AtomHand, Never>; type ParsSnippet<'a> = Snippet<'a, 'static, AtomHand, Never>;
pub trait ParseCtx: Send + Sync { pub trait ParseCtx: Send + Sync {
fn systems(&self) -> impl Iterator<Item = &System>; fn systems(&self) -> impl Iterator<Item = &System>;
fn reporter(&self) -> &impl Reporter; fn reporter(&self) -> &impl Reporter;
} }
pub fn parse_items( pub fn parse_items(
ctx: &impl ParseCtx, ctx: &impl ParseCtx,
path: Substack<Tok<String>>, path: Substack<Tok<String>>,
items: ParsSnippet items: ParsSnippet,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
let lines = line_items(items); let lines = line_items(items);
let mut ok = iter::from_fn(|| None).take(lines.len()).collect_vec(); let mut ok = iter::from_fn(|| None).take(lines.len()).collect_vec();
thread::scope(|s| { thread::scope(|s| {
let mut threads = Vec::new(); let mut threads = Vec::new();
for (slot, Parsed { output: cmts, tail }) in ok.iter_mut().zip(lines.into_iter()) { for (slot, Parsed { output: cmts, tail }) in ok.iter_mut().zip(lines.into_iter()) {
let path = &path; let path = &path;
threads.push(s.spawn(move || { threads.push(s.spawn(move || {
*slot = Some(parse_item(ctx, path.clone(), cmts, tail)?); *slot = Some(parse_item(ctx, path.clone(), cmts, tail)?);
Ok::<(), OrcErrv>(()) Ok::<(), OrcErrv>(())
})) }))
} }
for t in threads { for t in threads {
t.join().unwrap().err().into_iter().flatten().for_each(|e| ctx.reporter().report(e)) t.join().unwrap().err().into_iter().flatten().for_each(|e| ctx.reporter().report(e))
} }
}); });
Ok(ok.into_iter().flatten().flatten().collect_vec()) Ok(ok.into_iter().flatten().flatten().collect_vec())
} }
pub fn parse_item( pub fn parse_item(
ctx: &impl ParseCtx, ctx: &impl ParseCtx,
path: Substack<Tok<String>>, path: Substack<Tok<String>>,
comments: Vec<Comment>, comments: Vec<Comment>,
item: ParsSnippet, item: ParsSnippet,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
match item.pop_front() { match item.pop_front() {
Some((TokTree { tok: Token::Name(n), .. }, postdisc)) => match n { Some((TokTree { tok: Token::Name(n), .. }, postdisc)) => match n {
n if *n == intern!(str: "export") => match try_pop_no_fluff(postdisc)? { n if *n == intern!(str: "export") => match try_pop_no_fluff(postdisc)? {
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
parse_exportable_item(ctx, path, comments, true, n.clone(), tail), parse_exportable_item(ctx, path, comments, true, n.clone(), tail),
Parsed { output: TokTree { tok: Token::NS, .. }, tail } => { Parsed { output: TokTree { tok: Token::NS, .. }, tail } => {
let Parsed { output: exports, tail } = parse_multiname(ctx.reporter(), tail)?; let Parsed { output: exports, tail } = parse_multiname(ctx.reporter(), tail)?;
let mut ok = Vec::new(); let mut ok = Vec::new();
exports.into_iter().for_each(|(e, pos)| match (&e.path.as_slice(), e.name) { exports.into_iter().for_each(|(e, pos)| match (&e.path.as_slice(), e.name) {
([], Some(n)) => ([], Some(n)) =>
ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n) }), ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n) }),
(_, Some(_)) => ctx.reporter().report(mk_err( (_, Some(_)) => ctx.reporter().report(mk_err(
intern!(str: "Compound export"), intern!(str: "Compound export"),
"Cannot export compound names (names containing the :: separator)", "Cannot export compound names (names containing the :: separator)",
[pos.into()], [pos.into()],
)), )),
(_, None) => ctx.reporter().report(mk_err( (_, None) => ctx.reporter().report(mk_err(
intern!(str: "Wildcard export"), intern!(str: "Wildcard export"),
"Exports cannot contain the globstar *", "Exports cannot contain the globstar *",
[pos.into()], [pos.into()],
)), )),
}); });
expect_end(tail)?; expect_end(tail)?;
Ok(ok) Ok(ok)
}, },
Parsed { output, .. } => Err(mk_errv( Parsed { output, .. } => Err(mk_errv(
intern!(str: "Malformed export"), intern!(str: "Malformed export"),
"`export` can either prefix other lines or list names inside ::( ) or ::[ ]", "`export` can either prefix other lines or list names inside ::( ) or ::[ ]",
[Pos::Range(output.range.clone()).into()], [Pos::Range(output.range.clone()).into()],
)), )),
}, },
n if *n == intern!(str: "import") => parse_import(ctx, postdisc).map(|v| { n if *n == intern!(str: "import") => parse_import(ctx, postdisc).map(|v| {
Vec::from_iter(v.into_iter().map(|(t, pos)| Item { Vec::from_iter(v.into_iter().map(|(t, pos)| Item {
comments: comments.clone(), comments: comments.clone(),
pos, pos,
kind: ItemKind::Import(t), kind: ItemKind::Import(t),
})) }))
}), }),
n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc), n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc),
}, },
Some(_) => Some(_) =>
Err(mk_errv(intern!(str: "Expected a line type"), "All lines must begin with a keyword", [ Err(mk_errv(intern!(str: "Expected a line type"), "All lines must begin with a keyword", [
Pos::Range(item.pos()).into(), Pos::Range(item.pos()).into(),
])), ])),
None => unreachable!("These lines are filtered and aggregated in earlier stages"), None => unreachable!("These lines are filtered and aggregated in earlier stages"),
} }
} }
pub fn parse_import(ctx: &impl ParseCtx, tail: ParsSnippet) -> OrcRes<Vec<(Import, Pos)>> { pub fn parse_import(ctx: &impl ParseCtx, tail: ParsSnippet) -> OrcRes<Vec<(Import, Pos)>> {
let Parsed { output: imports, tail } = parse_multiname(ctx.reporter(), tail)?; let Parsed { output: imports, tail } = parse_multiname(ctx.reporter(), tail)?;
expect_end(tail)?; expect_end(tail)?;
Ok(imports) Ok(imports)
} }
pub fn parse_exportable_item( pub fn parse_exportable_item(
ctx: &impl ParseCtx, ctx: &impl ParseCtx,
path: Substack<Tok<String>>, path: Substack<Tok<String>>,
comments: Vec<Comment>, comments: Vec<Comment>,
exported: bool, exported: bool,
discr: Tok<String>, discr: Tok<String>,
tail: ParsSnippet, tail: ParsSnippet,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
let kind = if discr == intern!(str: "mod") { let kind = if discr == intern!(str: "mod") {
let (name, body) = parse_module(ctx, path, tail)?; let (name, body) = parse_module(ctx, path, tail)?;
ItemKind::Member(Member::new(name, MemberKind::Mod(body))) ItemKind::Member(Member::new(name, MemberKind::Mod(body)))
} else if discr == intern!(str: "const") { } else if discr == intern!(str: "const") {
let (name, val) = parse_const(tail)?; let (name, val) = parse_const(tail)?;
let locator = CodeLocator::to_const(path.push(name.clone()).unreverse()); let locator = CodeLocator::to_const(path.push(name.clone()).unreverse());
ItemKind::Member(Member::new(name, MemberKind::Const(Code::from_code(locator, val)))) ItemKind::Member(Member::new(name, MemberKind::Const(Code::from_code(locator, val))))
} else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) { } else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
let line = sys.parse(tail.to_vec(), exported, comments)?; let line = sys.parse(tail.to_vec(), exported, comments)?;
return parse_items(ctx, path, Snippet::new(tail.prev(), &line)); return parse_items(ctx, path, Snippet::new(tail.prev(), &line));
} else { } else {
let ext_lines = ctx.systems().flat_map(System::line_types).join(", "); let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
return Err(mk_errv( return Err(mk_errv(
intern!(str: "Unrecognized line type"), intern!(str: "Unrecognized line type"),
format!("Line types are: const, mod, macro, grammar, {ext_lines}"), format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
[Pos::Range(tail.prev().range.clone()).into()], [Pos::Range(tail.prev().range.clone()).into()],
)); ));
}; };
Ok(vec![Item { comments, pos: Pos::Range(tail.pos()), kind }]) Ok(vec![Item { comments, pos: Pos::Range(tail.pos()), kind }])
} }
pub fn parse_module( pub fn parse_module(
ctx: &impl ParseCtx, ctx: &impl ParseCtx,
path: Substack<Tok<String>>, path: Substack<Tok<String>>,
tail: ParsSnippet tail: ParsSnippet,
) -> OrcRes<(Tok<String>, Module)> { ) -> OrcRes<(Tok<String>, Module)> {
let (name, tail) = match try_pop_no_fluff(tail)? { let (name, tail) = match try_pop_no_fluff(tail)? {
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail), Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail),
Parsed { output, .. } => Parsed { output, .. } => {
return Err(mk_errv( return Err(mk_errv(
intern!(str: "Missing module name"), intern!(str: "Missing module name"),
format!("A name was expected, {output} was found"), format!("A name was expected, {output} was found"),
[Pos::Range(output.range.clone()).into()], [Pos::Range(output.range.clone()).into()],
)), ));
}; },
let Parsed { output, tail: surplus } = try_pop_no_fluff(tail)?; };
expect_end(surplus)?; let Parsed { output, tail: surplus } = try_pop_no_fluff(tail)?;
let body = output.as_s(Paren::Round).ok_or_else(|| mk_errv( expect_end(surplus)?;
intern!(str: "Expected module body"), let body = output.as_s(Paren::Round).ok_or_else(|| {
format!("A ( block ) was expected, {output} was found"), mk_errv(
[Pos::Range(output.range.clone()).into()], intern!(str: "Expected module body"),
))?; format!("A ( block ) was expected, {output} was found"),
let path = path.push(name.clone()); [Pos::Range(output.range.clone()).into()],
Ok((name, Module::new(parse_items(ctx, path, body)?))) )
})?;
let path = path.push(name.clone());
Ok((name, Module::new(parse_items(ctx, path, body)?)))
} }
pub fn parse_const(tail: ParsSnippet) -> OrcRes<(Tok<String>, Vec<ParsTokTree>)> { pub fn parse_const(tail: ParsSnippet) -> OrcRes<(Tok<String>, Vec<ParsTokTree>)> {
let Parsed { output, tail } = try_pop_no_fluff(tail)?; let Parsed { output, tail } = try_pop_no_fluff(tail)?;
let name = output.as_name().ok_or_else(|| mk_errv( let name = output.as_name().ok_or_else(|| {
intern!(str: "Missing module name"), mk_errv(
format!("A name was expected, {output} was found"), intern!(str: "Missing module name"),
[Pos::Range(output.range.clone()).into()], format!("A name was expected, {output} was found"),
))?; [Pos::Range(output.range.clone()).into()],
let Parsed { output, tail } = try_pop_no_fluff(tail)?; )
if !output.is_kw(intern!(str: "=")) { })?;
return Err(mk_errv( let Parsed { output, tail } = try_pop_no_fluff(tail)?;
intern!(str: "Missing walrus := separator"), if !output.is_kw(intern!(str: "=")) {
format!("Expected operator := , found {output}"), return Err(mk_errv(
[Pos::Range(output.range.clone()).into()], intern!(str: "Missing walrus := separator"),
)) format!("Expected operator := , found {output}"),
} [Pos::Range(output.range.clone()).into()],
try_pop_no_fluff(tail)?; ));
Ok((name, tail.iter().flat_map(strip_fluff).collect_vec())) }
try_pop_no_fluff(tail)?;
Ok((name, tail.iter().flat_map(strip_fluff).collect_vec()))
} }
pub fn parse_mtree(mut snip: ParsSnippet<'_>) -> OrcRes<Vec<MacTree>> { pub fn parse_mtree(mut snip: ParsSnippet<'_>) -> OrcRes<Vec<MacTree>> {
let mut mtreev = Vec::new(); let mut mtreev = Vec::new();
while let Some((ttree, tail)) = snip.pop_front() { while let Some((ttree, tail)) = snip.pop_front() {
let (range, tok, tail) = match &ttree.tok { let (range, tok, tail) = match &ttree.tok {
Token::S(p, b) => ( Token::S(p, b) =>
ttree.range.clone(), (ttree.range.clone(), MTok::S(*p, parse_mtree(Snippet::new(ttree, b))?), tail),
MTok::S(*p, parse_mtree(Snippet::new(ttree, b))?), Token::Name(tok) => {
tail, let mut segments = vec![tok.clone()];
), let mut end = ttree.range.end;
Token::Name(tok) => { while let Some((TokTree { tok: Token::NS, .. }, tail)) = snip.pop_front() {
let mut segments = vec![tok.clone()]; let Parsed { output, tail } = try_pop_no_fluff(tail)?;
let mut end = ttree.range.end; segments.push(output.as_name().ok_or_else(|| {
while let Some((TokTree { tok: Token::NS, .. }, tail)) = snip.pop_front() { mk_errv(
let Parsed { output, tail } = try_pop_no_fluff(tail)?; intern!(str: "Namespaced name interrupted"),
segments.push(output.as_name().ok_or_else(|| mk_errv( "In expression context, :: must always be followed by a name.\n\
intern!(str: "Namespaced name interrupted"),
"In expression context, :: must always be followed by a name.\n\
::() is permitted only in import and export items", ::() is permitted only in import and export items",
[Pos::Range(output.range.clone()).into()] [Pos::Range(output.range.clone()).into()],
))?); )
snip = tail; })?);
end = output.range.end; snip = tail;
} end = output.range.end;
(ttree.range.start..end, MTok::Name(Sym::new(segments).unwrap()), snip) }
}, (ttree.range.start..end, MTok::Name(Sym::new(segments).unwrap()), snip)
Token::NS => return Err(mk_errv( },
intern!(str: "Unexpected :: in macro pattern"), Token::NS => {
":: can only follow a name outside export statements", return Err(mk_errv(
[Pos::Range(ttree.range.clone()).into()] intern!(str: "Unexpected :: in macro pattern"),
)), ":: can only follow a name outside export statements",
Token::Ph(ph) => (ttree.range.clone(), MTok::Ph(ph.clone()), tail), [Pos::Range(ttree.range.clone()).into()],
Token::Atom(_) | Token::Macro(_) => return Err(mk_errv( ));
intern!(str: "Unsupported token in macro patterns"), },
format!("Macro patterns can only contain names, braces, and lambda, not {ttree}."), Token::Ph(ph) => (ttree.range.clone(), MTok::Ph(ph.clone()), tail),
[Pos::Range(ttree.range.clone()).into()] Token::Atom(_) | Token::Macro(_) => {
)), return Err(mk_errv(
Token::BR | Token::Comment(_) => continue, intern!(str: "Unsupported token in macro patterns"),
Token::Bottom(e) => return Err(e.clone()), format!("Macro patterns can only contain names, braces, and lambda, not {ttree}."),
Token::LambdaHead(arg) => ( [Pos::Range(ttree.range.clone()).into()],
ttree.range.start..snip.pos().end, ));
MTok::Lambda(parse_mtree(Snippet::new(ttree, arg))?, parse_mtree(tail)?), },
Snippet::new(ttree, &[]), Token::BR | Token::Comment(_) => continue,
), Token::Bottom(e) => return Err(e.clone()),
Token::Slot(_) | Token::X(_) => panic!("Did not expect {} in parsed token tree", &ttree.tok), Token::LambdaHead(arg) => (
}; ttree.range.start..snip.pos().end,
mtreev.push(MTree { pos: Pos::Range(range.clone()), tok: Arc::new(tok) }); MTok::Lambda(parse_mtree(Snippet::new(ttree, arg))?, parse_mtree(tail)?),
snip = tail; Snippet::new(ttree, &[]),
} ),
Ok(mtreev) Token::Slot(_) | Token::X(_) => panic!("Did not expect {} in parsed token tree", &ttree.tok),
};
mtreev.push(MTree { pos: Pos::Range(range.clone()), tok: Arc::new(tok) });
snip = tail;
}
Ok(mtreev)
} }
pub fn parse_macro(tail: ParsSnippet, macro_i: u16, path: Substack<Tok<String>>) -> OrcRes<Vec<Rule>> { pub fn parse_macro(
let (surplus, prev, block) = match try_pop_no_fluff(tail)? { tail: ParsSnippet,
Parsed { tail, output: o@TokTree { tok: Token::S(Paren::Round, b), .. } } => (tail, o, b), macro_i: u16,
Parsed { output, .. } => return Err(mk_errv( path: Substack<Tok<String>>,
intern!(str: "m"), ) -> OrcRes<Vec<Rule>> {
"Macro blocks must either start with a block or a ..$:number", let (surplus, prev, block) = match try_pop_no_fluff(tail)? {
[Pos::Range(output.range.clone()).into()] Parsed { tail, output: o @ TokTree { tok: Token::S(Paren::Round, b), .. } } => (tail, o, b),
)), Parsed { output, .. } => {
}; return Err(mk_errv(
expect_end(surplus)?; intern!(str: "m"),
let mut errors = Vec::new(); "Macro blocks must either start with a block or a ..$:number",
let mut rules = Vec::new(); [Pos::Range(output.range.clone()).into()],
for (i, item) in line_items(Snippet::new(prev, block)).into_iter().enumerate() { ));
let Parsed { tail, output } = try_pop_no_fluff(item.tail)?; },
if !output.is_kw(intern!(str: "rule")) { };
errors.extend(mk_errv( expect_end(surplus)?;
intern!(str: "non-rule in macro"), let mut errors = Vec::new();
format!("Expected `rule`, got {output}"), let mut rules = Vec::new();
[Pos::Range(output.range.clone()).into()] for (i, item) in line_items(Snippet::new(prev, block)).into_iter().enumerate() {
)); let Parsed { tail, output } = try_pop_no_fluff(item.tail)?;
continue if !output.is_kw(intern!(str: "rule")) {
}; errors.extend(mk_errv(
let (pat, body) = match tail.split_once(|t| t.is_kw(intern!(str: "=>"))) { intern!(str: "non-rule in macro"),
Some((a, b)) => (a, b), format!("Expected `rule`, got {output}"),
None => { [Pos::Range(output.range.clone()).into()],
errors.extend(mk_errv( ));
intern!(str: "no => in macro rule"), continue;
"The pattern and body of a rule must be separated by a =>", };
[Pos::Range(tail.pos()).into()], let (pat, body) = match tail.split_once(|t| t.is_kw(intern!(str: "=>"))) {
)); Some((a, b)) => (a, b),
continue None => {
} errors.extend(mk_errv(
}; intern!(str: "no => in macro rule"),
rules.push(Rule { "The pattern and body of a rule must be separated by a =>",
comments: item.output, [Pos::Range(tail.pos()).into()],
pos: Pos::Range(tail.pos()), ));
pattern: parse_mtree(pat)?, continue;
kind: RuleKind::Native(Code::from_code( },
CodeLocator::to_rule(path.unreverse(), macro_i, i as u16), };
body.to_vec(), rules.push(Rule {
)) comments: item.output,
}) pos: Pos::Range(tail.pos()),
} pattern: parse_mtree(pat)?,
if let Ok(e) = OrcErrv::new(errors) { Err(e) } else { Ok(rules) } kind: RuleKind::Native(Code::from_code(
CodeLocator::to_rule(path.unreverse(), macro_i, i as u16),
body.to_vec(),
)),
})
}
if let Ok(e) = OrcErrv::new(errors) { Err(e) } else { Ok(rules) }
} }

View File

@@ -1,29 +1,30 @@
use orchid_base::name::Sym;
use super::scal_match::scalv_match; use super::scal_match::scalv_match;
use super::shared::AnyMatcher; use super::shared::AnyMatcher;
use super::vec_match::vec_match; use super::vec_match::vec_match;
use orchid_base::name::Sym;
use crate::macros::MacTree; use crate::macros::MacTree;
use crate::rule::state::MatchState; use crate::rule::state::MatchState;
#[must_use] #[must_use]
pub fn any_match<'a>( pub fn any_match<'a>(
matcher: &AnyMatcher, matcher: &AnyMatcher,
seq: &'a [MacTree], seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool, save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> { ) -> Option<MatchState<'a>> {
match matcher { match matcher {
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq, save_loc), AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq, save_loc),
AnyMatcher::Vec { left, mid, right } => { AnyMatcher::Vec { left, mid, right } => {
if seq.len() < left.len() + right.len() { if seq.len() < left.len() + right.len() {
return None; return None;
}; };
let left_split = left.len(); let left_split = left.len();
let right_split = seq.len() - right.len(); let right_split = seq.len() - right.len();
Some( Some(
scalv_match(left, &seq[..left_split], save_loc)? scalv_match(left, &seq[..left_split], save_loc)?
.combine(scalv_match(right, &seq[right_split..], save_loc)?) .combine(scalv_match(right, &seq[right_split..], save_loc)?)
.combine(vec_match(mid, &seq[left_split..right_split], save_loc)?), .combine(vec_match(mid, &seq[left_split..right_split], save_loc)?),
) )
}, },
} }
} }

View File

@@ -1,6 +1,6 @@
use itertools::Itertools;
use orchid_api::PhKind; use orchid_api::PhKind;
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use itertools::Itertools;
use orchid_base::side::Side; use orchid_base::side::Side;
use orchid_base::tree::Ph; use orchid_base::tree::Ph;
@@ -14,30 +14,30 @@ pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree
/// slice of Expr's /// slice of Expr's
#[must_use] #[must_use]
fn split_at_max_vec(pattern: &[MacTree]) -> Option<MaxVecSplit> { fn split_at_max_vec(pattern: &[MacTree]) -> Option<MaxVecSplit> {
let rngidx = pattern let rngidx = pattern
.iter() .iter()
.position_max_by_key(|expr| vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1))?; .position_max_by_key(|expr| vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1))?;
let (left, not_left) = pattern.split_at(rngidx); let (left, not_left) = pattern.split_at(rngidx);
let (placeh, right) = let (placeh, right) =
not_left.split_first().expect("The index of the greatest element must be less than the length"); not_left.split_first().expect("The index of the greatest element must be less than the length");
vec_attrs(placeh).map(|attrs| (left, attrs, right)) vec_attrs(placeh).map(|attrs| (left, attrs, right))
} }
#[must_use] #[must_use]
fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize { fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
iter.take_while(|expr| vec_attrs(expr).is_none()).count() iter.take_while(|expr| vec_attrs(expr).is_none()).count()
} }
#[must_use] #[must_use]
pub fn mk_any(pattern: &[MacTree]) -> AnyMatcher { pub fn mk_any(pattern: &[MacTree]) -> AnyMatcher {
let left_split = scal_cnt(pattern.iter()); let left_split = scal_cnt(pattern.iter());
if pattern.len() <= left_split { if pattern.len() <= left_split {
return AnyMatcher::Scalar(mk_scalv(pattern)); return AnyMatcher::Scalar(mk_scalv(pattern));
} }
let (left, not_left) = pattern.split_at(left_split); let (left, not_left) = pattern.split_at(left_split);
let right_split = not_left.len() - scal_cnt(pattern.iter().rev()); let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
let (mid, right) = not_left.split_at(right_split); let (mid, right) = not_left.split_at(right_split);
AnyMatcher::Vec { left: mk_scalv(left), mid: mk_vec(mid), right: mk_scalv(right) } AnyMatcher::Vec { left: mk_scalv(left), mid: mk_vec(mid), right: mk_scalv(right) }
} }
/// Pattern MUST NOT contain vectorial placeholders /// Pattern MUST NOT contain vectorial placeholders
@@ -47,105 +47,103 @@ fn mk_scalv(pattern: &[MacTree]) -> Vec<ScalMatcher> { pattern.iter().map(mk_sca
/// Pattern MUST start and end with a vectorial placeholder /// Pattern MUST start and end with a vectorial placeholder
#[must_use] #[must_use]
pub fn mk_vec(pattern: &[MacTree]) -> VecMatcher { pub fn mk_vec(pattern: &[MacTree]) -> VecMatcher {
debug_assert!(!pattern.is_empty(), "pattern cannot be empty"); debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial"); debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial"); debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
let (left, (key, _, nonzero), right) = split_at_max_vec(pattern) let (left, (key, _, nonzero), right) = split_at_max_vec(pattern)
.expect("pattern must have vectorial placeholders at least at either end"); .expect("pattern must have vectorial placeholders at least at either end");
let r_sep_size = scal_cnt(right.iter()); let r_sep_size = scal_cnt(right.iter());
let (r_sep, r_side) = right.split_at(r_sep_size); let (r_sep, r_side) = right.split_at(r_sep_size);
let l_sep_size = scal_cnt(left.iter().rev()); let l_sep_size = scal_cnt(left.iter().rev());
let (l_side, l_sep) = left.split_at(left.len() - l_sep_size); let (l_side, l_sep) = left.split_at(left.len() - l_sep_size);
let main = VecMatcher::Placeh { key: key.clone(), nonzero }; let main = VecMatcher::Placeh { key: key.clone(), nonzero };
match (left, right) { match (left, right) {
(&[], &[]) => VecMatcher::Placeh { key, nonzero }, (&[], &[]) => VecMatcher::Placeh { key, nonzero },
(&[], _) => VecMatcher::Scan { (&[], _) => VecMatcher::Scan {
direction: Side::Left, direction: Side::Left,
left: Box::new(main), left: Box::new(main),
sep: mk_scalv(r_sep), sep: mk_scalv(r_sep),
right: Box::new(mk_vec(r_side)), right: Box::new(mk_vec(r_side)),
}, },
(_, &[]) => VecMatcher::Scan { (_, &[]) => VecMatcher::Scan {
direction: Side::Right, direction: Side::Right,
left: Box::new(mk_vec(l_side)), left: Box::new(mk_vec(l_side)),
sep: mk_scalv(l_sep), sep: mk_scalv(l_sep),
right: Box::new(main), right: Box::new(main),
}, },
(..) => { (..) => {
let mut key_order = let mut key_order =
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>(); l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64)); key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
VecMatcher::Middle { VecMatcher::Middle {
left: Box::new(mk_vec(l_side)), left: Box::new(mk_vec(l_side)),
left_sep: mk_scalv(l_sep), left_sep: mk_scalv(l_sep),
mid: Box::new(main), mid: Box::new(main),
right_sep: mk_scalv(r_sep), right_sep: mk_scalv(r_sep),
right: Box::new(mk_vec(r_side)), right: Box::new(mk_vec(r_side)),
key_order: key_order.into_iter().map(|(n, ..)| n).collect(), key_order: key_order.into_iter().map(|(n, ..)| n).collect(),
} }
}, },
} }
} }
/// Pattern MUST NOT be a vectorial placeholder /// Pattern MUST NOT be a vectorial placeholder
#[must_use] #[must_use]
fn mk_scalar(pattern: &MacTree) -> ScalMatcher { fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
match &*pattern.tok { match &*pattern.tok {
MacTok::Atom(_) | MacTok::Done(_) => panic!("Atoms and Done aren't supported in matchers"), MacTok::Atom(_) | MacTok::Done(_) => panic!("Atoms and Done aren't supported in matchers"),
MacTok::Name(n) => ScalMatcher::Name(n.clone()), MacTok::Name(n) => ScalMatcher::Name(n.clone()),
MacTok::Ph(Ph { name, kind }) => match kind { MacTok::Ph(Ph { name, kind }) => match kind {
PhKind::Vector { .. } => { PhKind::Vector { .. } => {
panic!("Scalar matcher cannot be built from vector pattern") panic!("Scalar matcher cannot be built from vector pattern")
}, },
PhKind::Scalar => PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
ScalMatcher::Placeh { key: name.clone() }, },
}, MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))),
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))), MacTok::Lambda(arg, body) => ScalMatcher::Lambda(Box::new(mk_any(arg)), Box::new(mk_any(body))),
MacTok::Lambda(arg, body) => ScalMatcher::Lambda(Box::new(mk_any(arg)), Box::new(mk_any(body))), MacTok::Ref(_) | MacTok::Slot(_) => panic!("Extension-only variants"),
MacTok::Ref(_) | MacTok::Slot(_) => panic!("Extension-only variants"), }
}
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::sync::Arc; use std::sync::Arc;
use orchid_api::PhKind; use orchid_api::PhKind;
use orchid_base::{intern, location::SourceRange, sym, tree::Ph, tokens::Paren}; use orchid_base::location::SourceRange;
use orchid_base::tokens::Paren;
use orchid_base::tree::Ph;
use orchid_base::{intern, sym};
use crate::macros::{MacTok, MacTree}; use super::mk_any;
use crate::macros::{MacTok, MacTree};
use super::mk_any; #[test]
fn test_scan() {
#[test] let ex = |tok: MacTok| MacTree { tok: Arc::new(tok), pos: SourceRange::mock().pos() };
fn test_scan() { let pattern = vec![
let ex = |tok: MacTok| MacTree{ tok: Arc::new(tok), pos: SourceRange::mock().pos() }; ex(MacTok::Ph(Ph {
let pattern = vec![ kind: PhKind::Vector { priority: 0, at_least_one: false },
ex(MacTok::Ph(Ph { name: intern!(str: "::prefix"),
kind: PhKind::Vector { priority: 0, at_least_one: false }, })),
name: intern!(str: "::prefix"), ex(MacTok::Name(sym!(prelude::do))),
})), ex(MacTok::S(Paren::Round, vec![
ex(MacTok::Name(sym!(prelude::do))), ex(MacTok::Ph(Ph {
ex(MacTok::S( kind: PhKind::Vector { priority: 0, at_least_one: false },
Paren::Round, name: intern!(str: "expr"),
vec![ })),
ex(MacTok::Ph(Ph { ex(MacTok::Name(sym!(prelude::;))),
kind: PhKind::Vector { priority: 0, at_least_one: false }, ex(MacTok::Ph(Ph {
name: intern!(str: "expr"), kind: PhKind::Vector { priority: 1, at_least_one: false },
})), name: intern!(str: "rest"),
ex(MacTok::Name(sym!(prelude::;))), })),
ex(MacTok::Ph(Ph { ])),
kind: PhKind::Vector { priority: 1, at_least_one: false }, ex(MacTok::Ph(Ph {
name: intern!(str: "rest"), kind: PhKind::Vector { priority: 0, at_least_one: false },
})), name: intern!(str: "::suffix"),
], })),
)), ];
ex(MacTok::Ph(Ph { let matcher = mk_any(&pattern);
kind: PhKind::Vector { priority: 0, at_least_one: false }, println!("{matcher}");
name: intern!(str: "::suffix"), }
})),
];
let matcher = mk_any(&pattern);
println!("{matcher}");
}
} }

View File

@@ -21,65 +21,66 @@ pub fn last_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.last().unwra
pub struct NamedMatcher(AnyMatcher); pub struct NamedMatcher(AnyMatcher);
impl NamedMatcher { impl NamedMatcher {
pub fn new(pattern: &[MacTree]) -> Self { pub fn new(pattern: &[MacTree]) -> Self {
assert!( assert!(
matches!(pattern.first().map(|tree| &*tree.tok), Some(MacTok::Name(_))), matches!(pattern.first().map(|tree| &*tree.tok), Some(MacTok::Name(_))),
"Named matchers must begin with a name" "Named matchers must begin with a name"
); );
match last_is_vec(pattern) { match last_is_vec(pattern) {
true => Self(mk_any(pattern)), true => Self(mk_any(pattern)),
false => { false => {
let kind: PhKind = PhKind::Vector { priority: 0, at_least_one: false }; let kind: PhKind = PhKind::Vector { priority: 0, at_least_one: false };
let suffix = [MacTok::Ph(Ph { name: intern!(str: "::after"), kind }).at(Pos::None)]; let suffix = [MacTok::Ph(Ph { name: intern!(str: "::after"), kind }).at(Pos::None)];
Self(mk_any(&pattern.iter().chain(&suffix).cloned().collect_vec())) Self(mk_any(&pattern.iter().chain(&suffix).cloned().collect_vec()))
}, },
} }
} }
/// Also returns the tail, if any, which should be matched further /// Also returns the tail, if any, which should be matched further
/// Note that due to how priod works below, the main usable information from the tail is /// Note that due to how priod works below, the main usable information from
/// its length /// the tail is its length
pub fn apply<'a>( pub fn apply<'a>(
&self, &self,
seq: &'a [MacTree], seq: &'a [MacTree],
save_loc: impl Fn(Sym) -> bool, save_loc: impl Fn(Sym) -> bool,
) -> Option<(MatchState<'a>, &'a [MacTree])> { ) -> Option<(MatchState<'a>, &'a [MacTree])> {
any_match(&self.0, seq, &save_loc).map(|mut state| match state.remove(intern!(str: "::after")) { any_match(&self.0, seq, &save_loc).map(|mut state| {
Some(StateEntry::Scalar(_)) => panic!("::after can never be a scalar entry!"), match state.remove(intern!(str: "::after")) {
Some(StateEntry::Vec(v)) => (state, v), Some(StateEntry::Scalar(_)) => panic!("::after can never be a scalar entry!"),
None => (state, &[][..]), Some(StateEntry::Vec(v)) => (state, v),
}) None => (state, &[][..]),
} }
})
}
} }
impl fmt::Display for NamedMatcher { impl fmt::Display for NamedMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
} }
impl fmt::Debug for NamedMatcher { impl fmt::Debug for NamedMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
} }
pub struct PriodMatcher(VecMatcher); pub struct PriodMatcher(VecMatcher);
impl PriodMatcher { impl PriodMatcher {
pub fn new(pattern: &[MacTree]) -> Self { pub fn new(pattern: &[MacTree]) -> Self {
assert!( assert!(
pattern.first().and_then(vec_attrs).is_some() pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(),
&& pattern.last().and_then(vec_attrs).is_some(), "Prioritized matchers must start and end with a vectorial",
"Prioritized matchers must start and end with a vectorial", );
); Self(mk_vec(pattern))
Self(mk_vec(pattern)) }
} /// tokens before the offset always match the prefix
/// tokens before the offset always match the prefix pub fn apply<'a>(
pub fn apply<'a>( &self,
&self, seq: &'a [MacTree],
seq: &'a [MacTree], save_loc: impl Fn(Sym) -> bool,
save_loc: impl Fn(Sym) -> bool, ) -> Option<MatchState<'a>> {
) -> Option<MatchState<'a>> { vec_match(&self.0, seq, &save_loc)
vec_match(&self.0, seq, &save_loc) }
}
} }
impl fmt::Display for PriodMatcher { impl fmt::Display for PriodMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
} }
impl fmt::Debug for PriodMatcher { impl fmt::Debug for PriodMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PriodMatcher({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PriodMatcher({self})") }
} }

View File

@@ -4,10 +4,9 @@
//! //!
//! convert pattern into hierarchy of plain, scan, middle //! convert pattern into hierarchy of plain, scan, middle
//! - plain: accept any sequence or any non-empty sequence //! - plain: accept any sequence or any non-empty sequence
//! - scan: a single scalar pattern moves LTR or RTL, submatchers on either //! - scan: a single scalar pattern moves LTR or RTL, submatchers on either side
//! side //! - middle: two scalar patterns walk over all permutations of matches while
//! - middle: two scalar patterns walk over all permutations of matches //! getting progressively closer to each other
//! while getting progressively closer to each other
//! //!
//! # Application //! # Application
//! //!
@@ -16,10 +15,10 @@
mod any_match; mod any_match;
mod build; mod build;
pub mod matcher;
mod scal_match; mod scal_match;
pub mod shared; pub mod shared;
mod vec_match;
pub mod state; pub mod state;
mod vec_attrs; mod vec_attrs;
pub mod matcher; mod vec_match;
// pub mod matcher; // pub mod matcher;

View File

@@ -7,38 +7,38 @@ use crate::rule::state::{MatchState, StateEntry};
#[must_use] #[must_use]
pub fn scal_match<'a>( pub fn scal_match<'a>(
matcher: &ScalMatcher, matcher: &ScalMatcher,
expr: &'a MacTree, expr: &'a MacTree,
save_loc: &impl Fn(Sym) -> bool, save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> { ) -> Option<MatchState<'a>> {
match (matcher, &*expr.tok) { match (matcher, &*expr.tok) {
(ScalMatcher::Name(n1), MacTok::Name(n2)) if n1 == n2 => Some(match save_loc(n1.clone()) { (ScalMatcher::Name(n1), MacTok::Name(n2)) if n1 == n2 => Some(match save_loc(n1.clone()) {
true => MatchState::from_name(n1.clone(), expr.pos.clone()), true => MatchState::from_name(n1.clone(), expr.pos.clone()),
false => MatchState::default(), false => MatchState::default(),
}), }),
(ScalMatcher::Placeh { .. }, MacTok::Done(_)) => None, (ScalMatcher::Placeh { .. }, MacTok::Done(_)) => None,
(ScalMatcher::Placeh { key }, _) => (ScalMatcher::Placeh { key }, _) =>
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))), Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 => (ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
any_match(b_mat, &body[..], save_loc), any_match(b_mat, &body[..], save_loc),
(ScalMatcher::Lambda(arg_mat, b_mat), MacTok::Lambda(arg, body)) => (ScalMatcher::Lambda(arg_mat, b_mat), MacTok::Lambda(arg, body)) =>
Some(any_match(arg_mat, arg, save_loc)?.combine(any_match(b_mat, body, save_loc)?)), Some(any_match(arg_mat, arg, save_loc)?.combine(any_match(b_mat, body, save_loc)?)),
_ => None, _ => None,
} }
} }
#[must_use] #[must_use]
pub fn scalv_match<'a>( pub fn scalv_match<'a>(
matchers: &[ScalMatcher], matchers: &[ScalMatcher],
seq: &'a [MacTree], seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool, save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> { ) -> Option<MatchState<'a>> {
if seq.len() != matchers.len() { if seq.len() != matchers.len() {
return None; return None;
} }
let mut state = MatchState::default(); let mut state = MatchState::default();
for (matcher, expr) in matchers.iter().zip(seq.iter()) { for (matcher, expr) in matchers.iter().zip(seq.iter()) {
state = state.combine(scal_match(matcher, expr, save_loc)?); state = state.combine(scal_match(matcher, expr, save_loc)?);
} }
Some(state) Some(state)
} }

View File

@@ -9,93 +9,93 @@ use orchid_base::side::Side;
use orchid_base::tokens::{PARENS, Paren}; use orchid_base::tokens::{PARENS, Paren};
pub enum ScalMatcher { pub enum ScalMatcher {
Name(Sym), Name(Sym),
S(Paren, Box<AnyMatcher>), S(Paren, Box<AnyMatcher>),
Lambda(Box<AnyMatcher>, Box<AnyMatcher>), Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
Placeh { key: Tok<String> }, Placeh { key: Tok<String> },
} }
pub enum VecMatcher { pub enum VecMatcher {
Placeh { Placeh {
key: Tok<String>, key: Tok<String>,
nonzero: bool, nonzero: bool,
}, },
Scan { Scan {
left: Box<VecMatcher>, left: Box<VecMatcher>,
sep: Vec<ScalMatcher>, sep: Vec<ScalMatcher>,
right: Box<VecMatcher>, right: Box<VecMatcher>,
/// The separator traverses the sequence towards this side /// The separator traverses the sequence towards this side
direction: Side, direction: Side,
}, },
Middle { Middle {
/// Matches the left outer region /// Matches the left outer region
left: Box<VecMatcher>, left: Box<VecMatcher>,
/// Matches the left separator /// Matches the left separator
left_sep: Vec<ScalMatcher>, left_sep: Vec<ScalMatcher>,
/// Matches the middle - can only ever be a plain placeholder /// Matches the middle - can only ever be a plain placeholder
mid: Box<VecMatcher>, mid: Box<VecMatcher>,
/// Matches the right separator /// Matches the right separator
right_sep: Vec<ScalMatcher>, right_sep: Vec<ScalMatcher>,
/// Matches the right outer region /// Matches the right outer region
right: Box<VecMatcher>, right: Box<VecMatcher>,
/// Order of significance for sorting equally good projects based on /// Order of significance for sorting equally good projects based on
/// the length of matches on either side. /// the length of matches on either side.
/// ///
/// Vectorial keys that appear on either side, in priority order /// Vectorial keys that appear on either side, in priority order
key_order: Vec<Tok<String>>, key_order: Vec<Tok<String>>,
}, },
} }
pub enum AnyMatcher { pub enum AnyMatcher {
Scalar(Vec<ScalMatcher>), Scalar(Vec<ScalMatcher>),
Vec { left: Vec<ScalMatcher>, mid: VecMatcher, right: Vec<ScalMatcher> }, Vec { left: Vec<ScalMatcher>, mid: VecMatcher, right: Vec<ScalMatcher> },
} }
// ################ Display ################ // ################ Display ################
impl fmt::Display for ScalMatcher { impl fmt::Display for ScalMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
Self::Placeh { key } => write!(f, "${key}"), Self::Placeh { key } => write!(f, "${key}"),
Self::Name(n) => write!(f, "{n}"), Self::Name(n) => write!(f, "{n}"),
Self::S(t, body) => { Self::S(t, body) => {
let (l, r, _) = PARENS.iter().find(|r| r.2 == *t).unwrap(); let (l, r, _) = PARENS.iter().find(|r| r.2 == *t).unwrap();
write!(f, "{l}{body}{r}") write!(f, "{l}{body}{r}")
}, },
Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"), Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"),
} }
} }
} }
impl fmt::Display for VecMatcher { impl fmt::Display for VecMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
Self::Placeh { key, nonzero: true } => write!(f, "...${key}"), Self::Placeh { key, nonzero: true } => write!(f, "...${key}"),
Self::Placeh { key, nonzero: false } => write!(f, "..${key}"), Self::Placeh { key, nonzero: false } => write!(f, "..${key}"),
Self::Scan { left, sep, right, direction } => { Self::Scan { left, sep, right, direction } => {
let arrow = if direction == &Side::Left { "<==" } else { "==>" }; let arrow = if direction == &Side::Left { "<==" } else { "==>" };
write!(f, "Scan{{{left} {arrow} {} {arrow} {right}}}", sep.iter().join(" ")) write!(f, "Scan{{{left} {arrow} {} {arrow} {right}}}", sep.iter().join(" "))
}, },
Self::Middle { left, left_sep, mid, right_sep, right, .. } => { Self::Middle { left, left_sep, mid, right_sep, right, .. } => {
let left_sep_s = left_sep.iter().join(" "); let left_sep_s = left_sep.iter().join(" ");
let right_sep_s = right_sep.iter().join(" "); let right_sep_s = right_sep.iter().join(" ");
write!(f, "Middle{{{left}|{left_sep_s}|{mid}|{right_sep_s}|{right}}}") write!(f, "Middle{{{left}|{left_sep_s}|{mid}|{right_sep_s}|{right}}}")
}, },
} }
} }
} }
impl fmt::Display for AnyMatcher { impl fmt::Display for AnyMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
Self::Scalar(s) => { Self::Scalar(s) => {
write!(f, "({})", s.iter().join(" ")) write!(f, "({})", s.iter().join(" "))
}, },
Self::Vec { left, mid, right } => { Self::Vec { left, mid, right } => {
let lefts = left.iter().join(" "); let lefts = left.iter().join(" ");
let rights = right.iter().join(" "); let rights = right.iter().join(" ");
write!(f, "[{lefts}|{mid}|{rights}]") write!(f, "[{lefts}|{mid}|{rights}]")
}, },
} }
} }
} }

View File

@@ -11,83 +11,81 @@ use orchid_base::name::Sym;
use crate::macros::MacTree; use crate::macros::MacTree;
enum StackAction { enum StackAction {
Return(Box<dyn Any>), Return(Box<dyn Any>),
Call { Call {
target: Box<dyn FnOnce(Box<dyn Any>) -> StackAction>, target: Box<dyn FnOnce(Box<dyn Any>) -> StackAction>,
param: Box<dyn Any>, param: Box<dyn Any>,
tail: Box<dyn FnOnce(Box<dyn Any>) -> StackAction> tail: Box<dyn FnOnce(Box<dyn Any>) -> StackAction>,
} },
} }
struct Trampoline { struct Trampoline {
stack: Vec<Box<dyn FnOnce(Box<dyn Any>) -> StackAction>> stack: Vec<Box<dyn FnOnce(Box<dyn Any>) -> StackAction>>,
} }
#[derive(Clone, Copy, Debug)] #[derive(Clone, Copy, Debug)]
pub enum StateEntry<'a> { pub enum StateEntry<'a> {
Vec(&'a [MacTree]), Vec(&'a [MacTree]),
Scalar(&'a MacTree), Scalar(&'a MacTree),
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct MatchState<'a> { pub struct MatchState<'a> {
placeholders: HashMap<Tok<String>, StateEntry<'a>>, placeholders: HashMap<Tok<String>, StateEntry<'a>>,
name_posv: HashMap<Sym, Vec<Pos>>, name_posv: HashMap<Sym, Vec<Pos>>,
} }
impl<'a> MatchState<'a> { impl<'a> MatchState<'a> {
pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self { pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self {
Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() } Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() }
} }
pub fn combine(self, s: Self) -> Self { pub fn combine(self, s: Self) -> Self {
Self { Self {
placeholders: self.placeholders.into_iter().chain(s.placeholders).collect(), placeholders: self.placeholders.into_iter().chain(s.placeholders).collect(),
name_posv: join_maps(self.name_posv, s.name_posv, |_, l, r| l.into_iter().chain(r).collect()), name_posv: join_maps(self.name_posv, s.name_posv, |_, l, r| l.into_iter().chain(r).collect()),
} }
} }
pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> { pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> {
match self.placeholders.get(key)? { match self.placeholders.get(key)? {
StateEntry::Vec(slc) => Some(slc.len()), StateEntry::Vec(slc) => Some(slc.len()),
_ => None, _ => None,
} }
} }
pub fn from_name(name: Sym, location: Pos) -> Self { pub fn from_name(name: Sym, location: Pos) -> Self {
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() } Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
} }
pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> { pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> {
self.placeholders.remove(&name) self.placeholders.remove(&name)
} }
pub fn mk_owned(self) -> OwnedState { pub fn mk_owned(self) -> OwnedState {
OwnedState { OwnedState {
placeholders: (self.placeholders.into_iter()) placeholders: (self.placeholders.into_iter())
.map(|(k, v)| { .map(|(k, v)| {
( (
k.clone(), k.clone(),
match_mapping!(v, StateEntry => OwnedEntry { match_mapping!(v, StateEntry => OwnedEntry {
Scalar(tree.clone()), Scalar(tree.clone()),
Vec(v.to_vec()), Vec(v.to_vec()),
}), }),
) )
}) })
.collect(), .collect(),
name_posv: self.name_posv, name_posv: self.name_posv,
} }
} }
} }
impl Default for MatchState<'static> { impl Default for MatchState<'static> {
fn default() -> Self { Self { name_posv: HashMap::new(), placeholders: HashMap::new() } } fn default() -> Self { Self { name_posv: HashMap::new(), placeholders: HashMap::new() } }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum OwnedEntry { pub enum OwnedEntry {
Vec(Vec<MacTree>), Vec(Vec<MacTree>),
Scalar(MacTree), Scalar(MacTree),
} }
pub struct OwnedState { pub struct OwnedState {
placeholders: HashMap<Tok<String>, OwnedEntry>, placeholders: HashMap<Tok<String>, OwnedEntry>,
name_posv: HashMap<Sym, Vec<Pos>>, name_posv: HashMap<Sym, Vec<Pos>>,
} }
impl OwnedState { impl OwnedState {
pub fn get(&self, key: &Tok<String>) -> Option<&OwnedEntry> { self.placeholders.get(key) } pub fn get(&self, key: &Tok<String>) -> Option<&OwnedEntry> { self.placeholders.get(key) }
pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) } pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) }
} }

View File

@@ -8,9 +8,9 @@ use crate::macros::{MacTok, MacTree};
/// a vectorial placeholder /// a vectorial placeholder
#[must_use] #[must_use]
pub fn vec_attrs(expr: &MacTree) -> Option<(Tok<String>, u8, bool)> { pub fn vec_attrs(expr: &MacTree) -> Option<(Tok<String>, u8, bool)> {
match (*expr.tok).clone() { match (*expr.tok).clone() {
MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) => MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) =>
Some((name, priority, at_least_one)), Some((name, priority, at_least_one)),
_ => None, _ => None,
} }
} }

View File

@@ -1,94 +1,95 @@
use std::cmp::Ordering; use std::cmp::Ordering;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::name::Sym;
use super::scal_match::scalv_match; use super::scal_match::scalv_match;
use super::shared::VecMatcher; use super::shared::VecMatcher;
use orchid_base::name::Sym; use crate::macros::MacTree;
use crate::{macros::MacTree, rule::state::{MatchState, StateEntry}}; use crate::rule::state::{MatchState, StateEntry};
#[must_use] #[must_use]
pub fn vec_match<'a>( pub fn vec_match<'a>(
matcher: &VecMatcher, matcher: &VecMatcher,
seq: &'a [MacTree], seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool, save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> { ) -> Option<MatchState<'a>> {
match matcher { match matcher {
VecMatcher::Placeh { key, nonzero } => { VecMatcher::Placeh { key, nonzero } => {
if *nonzero && seq.is_empty() { if *nonzero && seq.is_empty() {
return None; return None;
} }
Some(MatchState::from_ph(key.clone(), StateEntry::Vec(seq))) Some(MatchState::from_ph(key.clone(), StateEntry::Vec(seq)))
}, },
VecMatcher::Scan { left, sep, right, direction } => { VecMatcher::Scan { left, sep, right, direction } => {
if seq.len() < sep.len() { if seq.len() < sep.len() {
return None; return None;
} }
for lpos in direction.walk(0..=seq.len() - sep.len()) { for lpos in direction.walk(0..=seq.len() - sep.len()) {
let rpos = lpos + sep.len(); let rpos = lpos + sep.len();
let state = vec_match(left, &seq[..lpos], save_loc) let state = vec_match(left, &seq[..lpos], save_loc)
.and_then(|s| Some(s.combine(scalv_match(sep, &seq[lpos..rpos], save_loc)?))) .and_then(|s| Some(s.combine(scalv_match(sep, &seq[lpos..rpos], save_loc)?)))
.and_then(|s| Some(s.combine(vec_match(right, &seq[rpos..], save_loc)?))); .and_then(|s| Some(s.combine(vec_match(right, &seq[rpos..], save_loc)?)));
if let Some(s) = state { if let Some(s) = state {
return Some(s); return Some(s);
} }
} }
None None
}, },
// XXX predict heap space usage and allocation count // XXX predict heap space usage and allocation count
VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => { VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => {
if seq.len() < left_sep.len() + right_sep.len() { if seq.len() < left_sep.len() + right_sep.len() {
return None; return None;
} }
// Valid locations for the left separator // Valid locations for the left separator
let lposv = seq[..seq.len() - right_sep.len()] let lposv = seq[..seq.len() - right_sep.len()]
.windows(left_sep.len()) .windows(left_sep.len())
.enumerate() .enumerate()
.filter_map(|(i, window)| scalv_match(left_sep, window, save_loc).map(|s| (i, s))) .filter_map(|(i, window)| scalv_match(left_sep, window, save_loc).map(|s| (i, s)))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
// Valid locations for the right separator // Valid locations for the right separator
let rposv = seq[left_sep.len()..] let rposv = seq[left_sep.len()..]
.windows(right_sep.len()) .windows(right_sep.len())
.enumerate() .enumerate()
.filter_map(|(i, window)| scalv_match(right_sep, window, save_loc).map(|s| (i, s))) .filter_map(|(i, window)| scalv_match(right_sep, window, save_loc).map(|s| (i, s)))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
// Valid combinations of locations for the separators // Valid combinations of locations for the separators
let mut pos_pairs = lposv let mut pos_pairs = lposv
.into_iter() .into_iter()
.cartesian_product(rposv) .cartesian_product(rposv)
.filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos) .filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
.map(|((lpos, lstate), (rpos, rstate))| (lpos, rpos, lstate.combine(rstate))) .map(|((lpos, lstate), (rpos, rstate))| (lpos, rpos, lstate.combine(rstate)))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
// In descending order of size // In descending order of size
pos_pairs.sort_by_key(|(l, r, _)| -((r - l) as i64)); pos_pairs.sort_by_key(|(l, r, _)| -((r - l) as i64));
let eql_clusters = pos_pairs.into_iter().chunk_by(|(al, ar, _)| ar - al); let eql_clusters = pos_pairs.into_iter().chunk_by(|(al, ar, _)| ar - al);
for (_gap_size, cluster) in eql_clusters.into_iter() { for (_gap_size, cluster) in eql_clusters.into_iter() {
let best_candidate = cluster let best_candidate = cluster
.into_iter() .into_iter()
.filter_map(|(lpos, rpos, state)| { .filter_map(|(lpos, rpos, state)| {
Some( Some(
state state
.combine(vec_match(left, &seq[..lpos], save_loc)?) .combine(vec_match(left, &seq[..lpos], save_loc)?)
.combine(vec_match(mid, &seq[lpos + left_sep.len()..rpos], save_loc)?) .combine(vec_match(mid, &seq[lpos + left_sep.len()..rpos], save_loc)?)
.combine(vec_match(right, &seq[rpos + right_sep.len()..], save_loc)?), .combine(vec_match(right, &seq[rpos + right_sep.len()..], save_loc)?),
) )
}) })
.max_by(|a, b| { .max_by(|a, b| {
for key in key_order { for key in key_order {
let alen = a.ph_len(key).expect("key_order references scalar or missing"); let alen = a.ph_len(key).expect("key_order references scalar or missing");
let blen = b.ph_len(key).expect("key_order references scalar or missing"); let blen = b.ph_len(key).expect("key_order references scalar or missing");
match alen.cmp(&blen) { match alen.cmp(&blen) {
Ordering::Equal => (), Ordering::Equal => (),
any => return any, any => return any,
} }
} }
Ordering::Equal Ordering::Equal
}); });
if let Some(state) = best_candidate { if let Some(state) = best_candidate {
return Some(state); return Some(state);
} }
} }
None None
}, },
} }
} }

View File

@@ -1,7 +1,7 @@
use std::io::{self, BufRead as _, Write}; use std::io::{self, BufRead as _, Write};
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::mpsc::{sync_channel, SyncSender};
use std::sync::Mutex; use std::sync::Mutex;
use std::sync::mpsc::{SyncSender, sync_channel};
use std::{process, thread}; use std::{process, thread};
use orchid_api_traits::{Decode, Encode}; use orchid_api_traits::{Decode, Encode};
@@ -12,68 +12,61 @@ use crate::api;
use crate::extension::{ExtensionPort, OnMessage}; use crate::extension::{ExtensionPort, OnMessage};
pub struct Subprocess { pub struct Subprocess {
child: Mutex<process::Child>, child: Mutex<process::Child>,
stdin: Mutex<process::ChildStdin>, stdin: Mutex<process::ChildStdin>,
set_onmessage: SyncSender<OnMessage>, set_onmessage: SyncSender<OnMessage>,
header: api::ExtensionHeader, header: api::ExtensionHeader,
} }
impl Subprocess { impl Subprocess {
pub fn new(mut cmd: process::Command, logger: Logger) -> io::Result<Self> { pub fn new(mut cmd: process::Command, logger: Logger) -> io::Result<Self> {
let prog_pbuf = PathBuf::from(cmd.get_program()); let prog_pbuf = PathBuf::from(cmd.get_program());
let prog = prog_pbuf.file_stem().unwrap_or(cmd.get_program()).to_string_lossy().to_string(); let prog = prog_pbuf.file_stem().unwrap_or(cmd.get_program()).to_string_lossy().to_string();
let mut child = cmd let mut child = cmd
.stdin(process::Stdio::piped()) .stdin(process::Stdio::piped())
.stdout(process::Stdio::piped()) .stdout(process::Stdio::piped())
.stderr(process::Stdio::piped()) .stderr(process::Stdio::piped())
.spawn()?; .spawn()?;
let mut stdin = child.stdin.take().unwrap(); let mut stdin = child.stdin.take().unwrap();
api::HostHeader { log_strategy: logger.strat() }.encode(&mut stdin); api::HostHeader { log_strategy: logger.strat() }.encode(&mut stdin);
stdin.flush()?; stdin.flush()?;
let mut stdout = child.stdout.take().unwrap(); let mut stdout = child.stdout.take().unwrap();
let header = api::ExtensionHeader::decode(&mut stdout); let header = api::ExtensionHeader::decode(&mut stdout);
let child_stderr = child.stderr.take().unwrap(); let child_stderr = child.stderr.take().unwrap();
let (set_onmessage, recv_onmessage) = sync_channel(0); let (set_onmessage, recv_onmessage) = sync_channel(0);
thread::Builder::new().name(format!("stdout-fwd:{prog}")).spawn(move || { thread::Builder::new().name(format!("stdout-fwd:{prog}")).spawn(move || {
let mut onmessage: Box<dyn FnMut(&[u8]) + Send> = recv_onmessage.recv().unwrap(); let mut onmessage: Box<dyn FnMut(&[u8]) + Send> = recv_onmessage.recv().unwrap();
drop(recv_onmessage); drop(recv_onmessage);
loop { loop {
match recv_msg(&mut stdout) { match recv_msg(&mut stdout) {
Ok(msg) => onmessage(&msg[..]), Ok(msg) => onmessage(&msg[..]),
Err(e) if e.kind() == io::ErrorKind::BrokenPipe => break, Err(e) if e.kind() == io::ErrorKind::BrokenPipe => break,
Err(e) => panic!("Failed to read from stdout: {}, {e}", e.kind()), Err(e) => panic!("Failed to read from stdout: {}, {e}", e.kind()),
} }
} }
})?; })?;
thread::Builder::new().name(format!("stderr-fwd:{prog}")).spawn(move || { thread::Builder::new().name(format!("stderr-fwd:{prog}")).spawn(move || {
let mut reader = io::BufReader::new(child_stderr); let mut reader = io::BufReader::new(child_stderr);
loop { loop {
let mut buf = String::new(); let mut buf = String::new();
if 0 == reader.read_line(&mut buf).unwrap() { if 0 == reader.read_line(&mut buf).unwrap() {
break; break;
} }
logger.log(buf); logger.log(buf);
} }
})?; })?;
Ok(Self { Ok(Self { child: Mutex::new(child), stdin: Mutex::new(stdin), set_onmessage, header })
child: Mutex::new(child), }
stdin: Mutex::new(stdin),
set_onmessage,
header,
})
}
} }
impl Drop for Subprocess { impl Drop for Subprocess {
fn drop(&mut self) { self.child.lock().unwrap().wait().expect("Extension exited with error"); } fn drop(&mut self) { self.child.lock().unwrap().wait().expect("Extension exited with error"); }
} }
impl ExtensionPort for Subprocess { impl ExtensionPort for Subprocess {
fn set_onmessage(&self, callback: OnMessage) { fn set_onmessage(&self, callback: OnMessage) { self.set_onmessage.send(callback).unwrap(); }
self.set_onmessage.send(callback).unwrap(); fn header(&self) -> &orchid_api::ExtensionHeader { &self.header }
} fn send(&self, msg: &[u8]) {
fn header(&self) -> &orchid_api::ExtensionHeader { &self.header } if msg.starts_with(&[0, 0, 0, 0x1c]) {
fn send(&self, msg: &[u8]) { panic!("Received unnecessary prefix");
if msg.starts_with(&[0, 0, 0, 0x1c]) { }
panic!("Received unnecessary prefix"); send_msg(&mut *self.stdin.lock().unwrap(), msg).unwrap()
} }
send_msg(&mut *self.stdin.lock().unwrap(), msg).unwrap()
}
} }

View File

@@ -4,14 +4,14 @@ use std::sync::{Mutex, OnceLock};
use itertools::Itertools; use itertools::Itertools;
use never::Never; use never::Never;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::interner::{intern, Tok}; use orchid_base::interner::{Tok, intern};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::macros::mtreev_from_api; use orchid_base::macros::mtreev_from_api;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::{Comment, Import}; use orchid_base::parse::{Comment, Import};
use orchid_base::tree::{TokTree, Token}; use orchid_base::tree::{TokTree, Token};
use ordered_float::NotNan; use ordered_float::NotNan;
use substack::{with_iter_stack, Substack}; use substack::{Substack, with_iter_stack};
use crate::api; use crate::api;
use crate::expr::Expr; use crate::expr::Expr;
@@ -23,172 +23,168 @@ pub type ParsTok = Token<'static, AtomHand, Never>;
#[derive(Debug)] #[derive(Debug)]
pub struct Item { pub struct Item {
pub pos: Pos, pub pos: Pos,
pub comments: Vec<Comment>, pub comments: Vec<Comment>,
pub kind: ItemKind, pub kind: ItemKind,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum ItemKind { pub enum ItemKind {
Member(Member), Member(Member),
Export(Tok<String>), Export(Tok<String>),
Import(Import), Import(Import),
Macro(Option<NotNan<f64>>, Vec<Rule>) Macro(Option<NotNan<f64>>, Vec<Rule>),
} }
impl Item { impl Item {
pub fn from_api( pub fn from_api(tree: api::Item, path: Substack<Tok<String>>, sys: &System) -> Self {
tree: api::Item, let kind = match tree.kind {
path: Substack<Tok<String>>, api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys)),
sys: &System api::ItemKind::Import(i) =>
) -> Self { ItemKind::Import(Import { path: Sym::from_api(i).iter().collect(), name: None }),
let kind = match tree.kind { api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e)),
api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys)), api::ItemKind::Macro(api::MacroBlock { priority, rules }) => ItemKind::Macro(priority, {
api::ItemKind::Import(i) => Vec::from_iter(rules.into_iter().map(|api| Rule {
ItemKind::Import(Import{ path: Sym::from_api(i).iter().collect(), name: None }), pos: Pos::from_api(&api.location),
api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e)), pattern: mtreev_from_api(&api.pattern, &mut |a| {
api::ItemKind::Macro(api::MacroBlock { priority, rules }) => ItemKind::Macro(priority, { MacTok::Atom(AtomHand::from_api(a.clone()))
Vec::from_iter(rules.into_iter().map(|api| Rule { }),
pos: Pos::from_api(&api.location), kind: RuleKind::Remote(sys.clone(), api.id),
pattern: mtreev_from_api(&api.pattern, &mut |a| MacTok::Atom(AtomHand::from_api(a.clone()))), comments: api.comments.iter().map(Comment::from_api).collect_vec(),
kind: RuleKind::Remote(sys.clone(), api.id), }))
comments: api.comments.iter().map(Comment::from_api).collect_vec() }),
})) };
}) let comments = tree.comments.iter().map(Comment::from_api).collect_vec();
}; Self { pos: Pos::from_api(&tree.location), comments, kind }
let comments = tree.comments.iter().map(Comment::from_api).collect_vec(); }
Self { pos: Pos::from_api(&tree.location), comments, kind }
}
} }
#[derive(Debug)] #[derive(Debug)]
pub struct Member { pub struct Member {
pub name: Tok<String>, pub name: Tok<String>,
pub kind: OnceLock<MemberKind>, pub kind: OnceLock<MemberKind>,
pub lazy: Mutex<Option<LazyMemberHandle>>, pub lazy: Mutex<Option<LazyMemberHandle>>,
} }
impl Member { impl Member {
pub fn from_api( pub fn from_api(api: api::Member, path: Substack<Tok<String>>, sys: &System) -> Self {
api: api::Member, let name = Tok::from_api(api.name);
path: Substack<Tok<String>>, let full_path = path.push(name.clone());
sys: &System, let kind = match api.kind {
) -> Self { api::MemberKind::Lazy(id) => {
let name = Tok::from_api(api.name); return LazyMemberHandle(id, sys.clone(), intern(&full_path.unreverse())).into_member(name);
let full_path = path.push(name.clone()); },
let kind = match api.kind { api::MemberKind::Const(c) => MemberKind::Const(Code::from_expr(
api::MemberKind::Lazy(id) => CodeLocator::to_const(full_path.unreverse()),
return LazyMemberHandle(id, sys.clone(), intern(&full_path.unreverse())).into_member(name), Expr::from_api(&c, &mut ()),
api::MemberKind::Const(c) => MemberKind::Const(Code::from_expr( )),
CodeLocator::to_const(full_path.unreverse()), api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, full_path, sys)),
Expr::from_api(&c, &mut ()) };
)), Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, full_path, sys)), }
}; pub fn new(name: Tok<String>, kind: MemberKind) -> Self {
Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() } Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
} }
pub fn new(name: Tok<String>, kind: MemberKind) -> Self {
Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
}
} }
#[derive(Debug)] #[derive(Debug)]
pub enum MemberKind { pub enum MemberKind {
Const(Code), Const(Code),
Mod(Module), Mod(Module),
} }
#[derive(Debug)] #[derive(Debug)]
pub struct Module { pub struct Module {
pub imports: Vec<Sym>, pub imports: Vec<Sym>,
pub exports: Vec<Tok<String>>, pub exports: Vec<Tok<String>>,
pub items: Vec<Item>, pub items: Vec<Item>,
} }
impl Module { impl Module {
pub fn new(items: impl IntoIterator<Item = Item>) -> Self { pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
let items = items.into_iter().collect_vec(); let items = items.into_iter().collect_vec();
let exports = (items.iter()) let exports = (items.iter())
.filter_map(|i| match &i.kind { .filter_map(|i| match &i.kind {
ItemKind::Export(e) => Some(e.clone()), ItemKind::Export(e) => Some(e.clone()),
_ => None, _ => None,
}) })
.collect_vec(); .collect_vec();
Self { imports: vec![], exports, items } Self { imports: vec![], exports, items }
} }
pub fn from_api(m: api::Module, path: Substack<Tok<String>>, sys: &System) -> Self { pub fn from_api(m: api::Module, path: Substack<Tok<String>>, sys: &System) -> Self {
let mut output = Vec::new(); let mut output = Vec::new();
for item in m.items.into_iter() { for item in m.items.into_iter() {
let next = Item::from_api(item, path.clone(), sys); let next = Item::from_api(item, path.clone(), sys);
output.push(next); output.push(next);
} }
Self::new(output) Self::new(output)
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub struct LazyMemberHandle(api::TreeId, System, Tok<Vec<Tok<String>>>); pub struct LazyMemberHandle(api::TreeId, System, Tok<Vec<Tok<String>>>);
impl LazyMemberHandle { impl LazyMemberHandle {
pub fn run(self) -> OrcRes<MemberKind> { pub fn run(self) -> OrcRes<MemberKind> {
match self.1.get_tree(self.0) { match self.1.get_tree(self.0) {
api::MemberKind::Const(c) => Ok(MemberKind::Const(Code { api::MemberKind::Const(c) => Ok(MemberKind::Const(Code {
bytecode: Expr::from_api(&c, &mut ()).into(), bytecode: Expr::from_api(&c, &mut ()).into(),
locator: CodeLocator { steps: self.2, rule_loc: None }, locator: CodeLocator { steps: self.2, rule_loc: None },
source: None, source: None,
})), })),
api::MemberKind::Module(m) => with_iter_stack(self.2.iter().cloned(), |path| { api::MemberKind::Module(m) => with_iter_stack(self.2.iter().cloned(), |path| {
Ok(MemberKind::Mod(Module::from_api(m, path, &self.1))) Ok(MemberKind::Mod(Module::from_api(m, path, &self.1)))
}), }),
api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run(), api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run(),
} }
} }
pub fn into_member(self, name: Tok<String>) -> Member { pub fn into_member(self, name: Tok<String>) -> Member {
Member { name, kind: OnceLock::new(), lazy: Mutex::new(Some(self)) } Member { name, kind: OnceLock::new(), lazy: Mutex::new(Some(self)) }
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub struct Rule { pub struct Rule {
pub pos: Pos, pub pos: Pos,
pub comments: Vec<Comment>, pub comments: Vec<Comment>,
pub pattern: Vec<MacTree>, pub pattern: Vec<MacTree>,
pub kind: RuleKind, pub kind: RuleKind,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum RuleKind { pub enum RuleKind {
Remote(System, api::MacroId), Remote(System, api::MacroId),
Native(Code), Native(Code),
} }
#[derive(Debug)] #[derive(Debug)]
pub struct Code { pub struct Code {
locator: CodeLocator, locator: CodeLocator,
source: Option<Vec<ParsTokTree>>, source: Option<Vec<ParsTokTree>>,
bytecode: OnceLock<Expr>, bytecode: OnceLock<Expr>,
} }
impl Code { impl Code {
pub fn from_expr(locator: CodeLocator, expr: Expr) -> Self { pub fn from_expr(locator: CodeLocator, expr: Expr) -> Self {
Self { locator, source: None, bytecode: expr.into() } Self { locator, source: None, bytecode: expr.into() }
} }
pub fn from_code(locator: CodeLocator, code: Vec<ParsTokTree>) -> Self { pub fn from_code(locator: CodeLocator, code: Vec<ParsTokTree>) -> Self {
Self { locator, source: Some(code), bytecode: OnceLock::new() } Self { locator, source: Some(code), bytecode: OnceLock::new() }
} }
} }
/// Selects a code element /// Selects a code element
/// ///
/// Either the steps point to a constant and rule_loc is None, or the steps point to a module and /// Either the steps point to a constant and rule_loc is None, or the steps
/// rule_loc selects a macro rule within that module /// point to a module and rule_loc selects a macro rule within that module
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct CodeLocator { pub struct CodeLocator {
steps: Tok<Vec<Tok<String>>>, steps: Tok<Vec<Tok<String>>>,
/// Index of a macro block in the module demarked by the steps, and a rule in that macro /// Index of a macro block in the module demarked by the steps, and a rule in
rule_loc: Option<(u16, u16)>, /// that macro
rule_loc: Option<(u16, u16)>,
} }
impl CodeLocator { impl CodeLocator {
pub fn to_const(path: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn to_const(path: impl IntoIterator<Item = Tok<String>>) -> Self {
Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: None } Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: None }
} }
pub fn to_rule(path: impl IntoIterator<Item = Tok<String>>, macro_i: u16, rule_i: u16) -> Self { pub fn to_rule(path: impl IntoIterator<Item = Tok<String>>, macro_i: u16, rule_i: u16) -> Self {
Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: Some((macro_i, rule_i)) } Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: Some((macro_i, rule_i)) }
} }
} }

View File

@@ -4,12 +4,12 @@ version = "0.1.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
itertools = "0.13.0" itertools = "0.14.0"
never = "0.1.0" never = "0.1.0"
once_cell = "1.19.0" once_cell = "1.20.2"
orchid-api = { version = "0.1.0", path = "../orchid-api" } orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
orchid-extension = { version = "0.1.0", path = "../orchid-extension" } orchid-extension = { version = "0.1.0", path = "../orchid-extension" }
ordered-float = "4.2.1" ordered-float = "4.6.0"

View File

@@ -1,6 +1,6 @@
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_extension::atom::{AtomFactory, MethodSet, Atomic, AtomicFeatures, ToAtom, TypAtom}; use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, MethodSet, ToAtom, TypAtom};
use orchid_extension::atom_thin::{ThinAtom, ThinVariant}; use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::conv::TryFromExpr; use orchid_extension::conv::TryFromExpr;
use orchid_extension::expr::Expr; use orchid_extension::expr::Expr;
@@ -9,49 +9,47 @@ use ordered_float::NotNan;
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Int(pub i64); pub struct Int(pub i64);
impl Atomic for Int { impl Atomic for Int {
type Variant = ThinVariant; type Variant = ThinVariant;
type Data = Self; type Data = Self;
fn reg_reqs() -> MethodSet<Self> { fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
MethodSet::new()
}
} }
impl ThinAtom for Int {} impl ThinAtom for Int {}
impl TryFromExpr for Int { impl TryFromExpr for Int {
fn try_from_expr(expr: Expr) -> OrcRes<Self> { fn try_from_expr(expr: Expr) -> OrcRes<Self> {
TypAtom::<Int>::try_from_expr(expr).map(|t| t.value) TypAtom::<Int>::try_from_expr(expr).map(|t| t.value)
} }
} }
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Float(pub NotNan<f64>); pub struct Float(pub NotNan<f64>);
impl Atomic for Float { impl Atomic for Float {
type Variant = ThinVariant; type Variant = ThinVariant;
type Data = Self; type Data = Self;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() } fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
} }
impl ThinAtom for Float {} impl ThinAtom for Float {}
impl TryFromExpr for Float { impl TryFromExpr for Float {
fn try_from_expr(expr: Expr) -> OrcRes<Self> { fn try_from_expr(expr: Expr) -> OrcRes<Self> {
TypAtom::<Float>::try_from_expr(expr).map(|t| t.value) TypAtom::<Float>::try_from_expr(expr).map(|t| t.value)
} }
} }
pub enum Numeric { pub enum Numeric {
Int(i64), Int(i64),
Float(NotNan<f64>), Float(NotNan<f64>),
} }
impl TryFromExpr for Numeric { impl TryFromExpr for Numeric {
fn try_from_expr(expr: Expr) -> OrcRes<Self> { fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Int::try_from_expr(expr.clone()) Int::try_from_expr(expr.clone())
.map(|t| Numeric::Int(t.0)) .map(|t| Numeric::Int(t.0))
.or_else(|e| Float::try_from_expr(expr).map(|t| Numeric::Float(t.0)).map_err(|e2| e + e2)) .or_else(|e| Float::try_from_expr(expr).map(|t| Numeric::Float(t.0)).map_err(|e2| e + e2))
} }
} }
impl ToAtom for Numeric { impl ToAtom for Numeric {
fn to_atom_factory(self) -> AtomFactory { fn to_atom_factory(self) -> AtomFactory {
match self { match self {
Self::Float(f) => Float(f).factory(), Self::Float(f) => Float(f).factory(),
Self::Int(i) => Int(i).factory(), Self::Int(i) => Int(i).factory(),
} }
} }
} }

View File

@@ -1,7 +1,7 @@
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::number::{num_to_err, parse_num, Numeric}; use orchid_base::number::{Numeric, num_to_err, parse_num};
use orchid_extension::atom::AtomicFeatures; use orchid_extension::atom::AtomicFeatures;
use orchid_extension::lexer::{LexContext, Lexer}; use orchid_extension::lexer::{LexContext, Lexer};
use orchid_extension::tree::{GenTok, GenTokTree}; use orchid_extension::tree::{GenTok, GenTokTree};
@@ -12,16 +12,16 @@ use super::num_atom::{Float, Int};
#[derive(Default)] #[derive(Default)]
pub struct NumLexer; pub struct NumLexer;
impl Lexer for NumLexer { impl Lexer for NumLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9']; const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> { fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c)); let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len())); let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
let fac = match parse_num(chars) { let fac = match parse_num(chars) {
Ok(Numeric::Float(f)) => Float(f).factory(), Ok(Numeric::Float(f)) => Float(f).factory(),
Ok(Numeric::Uint(uint)) => Int(uint.try_into().unwrap()).factory(), Ok(Numeric::Uint(uint)) => Int(uint.try_into().unwrap()).factory(),
Ok(Numeric::Decimal(dec)) => Float(NotNan::new(dec.try_into().unwrap()).unwrap()).factory(), Ok(Numeric::Decimal(dec)) => Float(NotNan::new(dec.try_into().unwrap()).unwrap()).factory(),
Err(e) => return Err(num_to_err(e, ctx.pos(all)).into()), Err(e) => return Err(num_to_err(e, ctx.pos(all)).into()),
}; };
Ok((tail, GenTok::X(fac).at(ctx.pos(all)..ctx.pos(tail)))) Ok((tail, GenTok::X(fac).at(ctx.pos(all)..ctx.pos(tail))))
} }
} }

View File

@@ -7,40 +7,40 @@ use orchid_extension::entrypoint::ExtReq;
use orchid_extension::fs::DeclFs; use orchid_extension::fs::DeclFs;
use orchid_extension::system::{System, SystemCard}; use orchid_extension::system::{System, SystemCard};
use orchid_extension::system_ctor::SystemCtor; use orchid_extension::system_ctor::SystemCtor;
use orchid_extension::tree::{comments, fun, module, root_mod, MemKind}; use orchid_extension::tree::{MemKind, comments, fun, module, root_mod};
use crate::OrcString;
use crate::number::num_atom::{Float, Int}; use crate::number::num_atom::{Float, Int};
use crate::string::str_atom::{IntStrAtom, StrAtom}; use crate::string::str_atom::{IntStrAtom, StrAtom};
use crate::string::str_lexer::StringLexer; use crate::string::str_lexer::StringLexer;
use crate::OrcString;
#[derive(Default)] #[derive(Default)]
pub struct StdSystem; pub struct StdSystem;
impl SystemCtor for StdSystem { impl SystemCtor for StdSystem {
type Deps = (); type Deps = ();
type Instance = Self; type Instance = Self;
const NAME: &'static str = "orchid::std"; const NAME: &'static str = "orchid::std";
const VERSION: f64 = 0.00_01; const VERSION: f64 = 0.00_01;
fn inst() -> Option<Self::Instance> { Some(StdSystem) } fn inst() -> Option<Self::Instance> { Some(StdSystem) }
} }
impl SystemCard for StdSystem { impl SystemCard for StdSystem {
type Ctor = Self; type Ctor = Self;
type Req = Never; type Req = Never;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> { fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> {
[Some(Int::dynfo()), Some(Float::dynfo()), Some(StrAtom::dynfo()), Some(IntStrAtom::dynfo())] [Some(Int::dynfo()), Some(Float::dynfo()), Some(StrAtom::dynfo()), Some(IntStrAtom::dynfo())]
} }
} }
impl System for StdSystem { impl System for StdSystem {
fn request(_: ExtReq, req: Self::Req) -> orchid_base::reqnot::Receipt { match req {} } fn request(_: ExtReq, req: Self::Req) -> orchid_base::reqnot::Receipt { match req {} }
fn lexers() -> Vec<orchid_extension::lexer::LexerObj> { vec![&StringLexer] } fn lexers() -> Vec<orchid_extension::lexer::LexerObj> { vec![&StringLexer] }
fn parsers() -> Vec<orchid_extension::parser::ParserObj> { vec![] } fn parsers() -> Vec<orchid_extension::parser::ParserObj> { vec![] }
fn vfs() -> DeclFs { DeclFs::Mod(&[]) } fn vfs() -> DeclFs { DeclFs::Mod(&[]) }
fn env() -> Vec<(Tok<String>, MemKind)> { fn env() -> Vec<(Tok<String>, MemKind)> {
vec![root_mod("std", [], [module(true, "string", [], [comments( vec![root_mod("std", [], [module(true, "string", [], [comments(
["Concatenate two strings"], ["Concatenate two strings"],
fun(true, "concat", |left: OrcString, right: OrcString| { fun(true, "concat", |left: OrcString, right: OrcString| {
StrAtom::new(Arc::new(left.get_string().to_string() + &right.get_string())) StrAtom::new(Arc::new(left.get_string().to_string() + &right.get_string()))
}), }),
)])])] )])])]
} }
} }

View File

@@ -5,9 +5,9 @@ use std::sync::Arc;
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_api_traits::{Encode, Request}; use orchid_api_traits::{Encode, Request};
use orchid_base::error::{mk_errv, OrcRes}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::intern; use orchid_base::intern;
use orchid_base::interner::{intern, Tok}; use orchid_base::interner::{Tok, intern};
use orchid_extension::atom::{AtomMethod, Atomic, MethodSet, Supports, TypAtom}; use orchid_extension::atom::{AtomMethod, Atomic, MethodSet, Supports, TypAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant}; use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::conv::TryFromExpr; use orchid_extension::conv::TryFromExpr;
@@ -17,83 +17,83 @@ use orchid_extension::system::SysCtx;
#[derive(Copy, Clone, Coding)] #[derive(Copy, Clone, Coding)]
pub struct StringGetVal; pub struct StringGetVal;
impl Request for StringGetVal { impl Request for StringGetVal {
type Response = Arc<String>; type Response = Arc<String>;
} }
impl AtomMethod for StringGetVal { impl AtomMethod for StringGetVal {
const NAME: &str = "std::string_get_val"; const NAME: &str = "std::string_get_val";
} }
impl Supports<StringGetVal> for StrAtom { impl Supports<StringGetVal> for StrAtom {
fn handle(&self, _: SysCtx, _: StringGetVal) -> <StringGetVal as Request>::Response { fn handle(&self, _: SysCtx, _: StringGetVal) -> <StringGetVal as Request>::Response {
self.0.clone() self.0.clone()
} }
} }
#[derive(Clone)] #[derive(Clone)]
pub struct StrAtom(Arc<String>); pub struct StrAtom(Arc<String>);
impl Atomic for StrAtom { impl Atomic for StrAtom {
type Variant = OwnedVariant; type Variant = OwnedVariant;
type Data = (); type Data = ();
fn reg_reqs() -> MethodSet<Self> { MethodSet::new().handle::<StringGetVal>() } fn reg_reqs() -> MethodSet<Self> { MethodSet::new().handle::<StringGetVal>() }
} }
impl StrAtom { impl StrAtom {
pub fn new(str: Arc<String>) -> Self { Self(str) } pub fn new(str: Arc<String>) -> Self { Self(str) }
pub fn value(&self) -> Arc<String> { self.0.clone() } pub fn value(&self) -> Arc<String> { self.0.clone() }
} }
impl Deref for StrAtom { impl Deref for StrAtom {
type Target = str; type Target = str;
fn deref(&self) -> &Self::Target { &self.0 } fn deref(&self) -> &Self::Target { &self.0 }
} }
impl OwnedAtom for StrAtom { impl OwnedAtom for StrAtom {
type Refs = (); type Refs = ();
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
fn serialize(&self, _: SysCtx, sink: &mut (impl io::Write + ?Sized)) -> Self::Refs { fn serialize(&self, _: SysCtx, sink: &mut (impl io::Write + ?Sized)) -> Self::Refs {
self.deref().encode(sink) self.deref().encode(sink)
} }
fn deserialize(mut ctx: impl DeserializeCtx, _: Self::Refs) -> Self { fn deserialize(mut ctx: impl DeserializeCtx, _: Self::Refs) -> Self {
Self::new(Arc::new(ctx.read::<String>())) Self::new(Arc::new(ctx.read::<String>()))
} }
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct IntStrAtom(Tok<String>); pub struct IntStrAtom(Tok<String>);
impl Atomic for IntStrAtom { impl Atomic for IntStrAtom {
type Variant = OwnedVariant; type Variant = OwnedVariant;
type Data = orchid_api::TStr; type Data = orchid_api::TStr;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() } fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
} }
impl From<Tok<String>> for IntStrAtom { impl From<Tok<String>> for IntStrAtom {
fn from(value: Tok<String>) -> Self { Self(value) } fn from(value: Tok<String>) -> Self { Self(value) }
} }
impl OwnedAtom for IntStrAtom { impl OwnedAtom for IntStrAtom {
type Refs = (); type Refs = ();
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.0.to_api()) } fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.0.to_api()) }
fn print(&self, _ctx: SysCtx) -> String { format!("{:?}i", *self.0) } fn print(&self, _ctx: SysCtx) -> String { format!("{:?}i", *self.0) }
fn serialize(&self, _: SysCtx, write: &mut (impl io::Write + ?Sized)) { self.0.encode(write) } fn serialize(&self, _: SysCtx, write: &mut (impl io::Write + ?Sized)) { self.0.encode(write) }
fn deserialize(ctx: impl DeserializeCtx, _: ()) -> Self { Self(intern(&ctx.decode::<String>())) } fn deserialize(ctx: impl DeserializeCtx, _: ()) -> Self { Self(intern(&ctx.decode::<String>())) }
} }
#[derive(Clone)] #[derive(Clone)]
pub enum OrcString<'a> { pub enum OrcString<'a> {
Val(TypAtom<'a, StrAtom>), Val(TypAtom<'a, StrAtom>),
Int(TypAtom<'a, IntStrAtom>), Int(TypAtom<'a, IntStrAtom>),
} }
impl OrcString<'_> { impl OrcString<'_> {
pub fn get_string(&self) -> Arc<String> { pub fn get_string(&self) -> Arc<String> {
match &self { match &self {
Self::Int(tok) => Tok::from_api(tok.value).arc(), Self::Int(tok) => Tok::from_api(tok.value).arc(),
Self::Val(atom) => atom.request(StringGetVal), Self::Val(atom) => atom.request(StringGetVal),
} }
} }
} }
impl TryFromExpr for OrcString<'static> { impl TryFromExpr for OrcString<'static> {
fn try_from_expr(expr: Expr) -> OrcRes<OrcString<'static>> { fn try_from_expr(expr: Expr) -> OrcRes<OrcString<'static>> {
if let Ok(v) = TypAtom::<StrAtom>::try_from_expr(expr.clone()) { if let Ok(v) = TypAtom::<StrAtom>::try_from_expr(expr.clone()) {
return Ok(OrcString::Val(v)); return Ok(OrcString::Val(v));
} }
match TypAtom::<IntStrAtom>::try_from_expr(expr) { match TypAtom::<IntStrAtom>::try_from_expr(expr) {
Ok(t) => Ok(OrcString::Int(t)), Ok(t) => Ok(OrcString::Int(t)),
Err(e) => Err(mk_errv(intern!(str: "A string was expected"), "", e.pos_iter())), Err(e) => Err(mk_errv(intern!(str: "A string was expected"), "", e.pos_iter())),
} }
} }
} }

View File

@@ -1,11 +1,11 @@
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{mk_err, mk_errv, OrcErr, OrcRes}; use orchid_base::error::{OrcErr, OrcRes, mk_err, mk_errv};
use orchid_base::interner::intern; use orchid_base::interner::intern;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::tree::{vname_tv, wrap_tokv}; use orchid_base::tree::{vname_tv, wrap_tokv};
use orchid_base::{intern, vname}; use orchid_base::{intern, vname};
use orchid_extension::atom::AtomicFeatures; use orchid_extension::atom::AtomicFeatures;
use orchid_extension::lexer::{err_not_applicable, LexContext, Lexer}; use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::tree::{GenTok, GenTokTree}; use orchid_extension::tree::{GenTok, GenTokTree};
use super::str_atom::IntStrAtom; use super::str_atom::IntStrAtom;
@@ -13,126 +13,126 @@ use super::str_atom::IntStrAtom;
/// Reasons why [parse_string] might fail. See [StringError] /// Reasons why [parse_string] might fail. See [StringError]
#[derive(Clone)] #[derive(Clone)]
enum StringErrorKind { enum StringErrorKind {
/// A unicode escape sequence wasn't followed by 4 hex digits /// A unicode escape sequence wasn't followed by 4 hex digits
NotHex, NotHex,
/// A unicode escape sequence contained an unassigned code point /// A unicode escape sequence contained an unassigned code point
BadCodePoint, BadCodePoint,
/// An unrecognized escape sequence was found /// An unrecognized escape sequence was found
BadEscSeq, BadEscSeq,
} }
/// Error produced by [parse_string] /// Error produced by [parse_string]
#[derive(Clone)] #[derive(Clone)]
struct StringError { struct StringError {
/// Character where the error occured /// Character where the error occured
pos: u32, pos: u32,
/// Reason for the error /// Reason for the error
kind: StringErrorKind, kind: StringErrorKind,
} }
impl StringError { impl StringError {
/// Convert into project error for reporting /// Convert into project error for reporting
pub fn into_proj(self, pos: u32) -> OrcErr { pub fn into_proj(self, pos: u32) -> OrcErr {
let start = pos + self.pos; let start = pos + self.pos;
mk_err( mk_err(
intern!(str: "Failed to parse string"), intern!(str: "Failed to parse string"),
match self.kind { match self.kind {
StringErrorKind::NotHex => "Expected a hex digit", StringErrorKind::NotHex => "Expected a hex digit",
StringErrorKind::BadCodePoint => "The specified number is not a Unicode code point", StringErrorKind::BadCodePoint => "The specified number is not a Unicode code point",
StringErrorKind::BadEscSeq => "Unrecognized escape sequence", StringErrorKind::BadEscSeq => "Unrecognized escape sequence",
}, },
[Pos::Range(start..start + 1).into()], [Pos::Range(start..start + 1).into()],
) )
} }
} }
/// Process escape sequences in a string literal /// Process escape sequences in a string literal
fn parse_string(str: &str) -> Result<String, StringError> { fn parse_string(str: &str) -> Result<String, StringError> {
let mut target = String::new(); let mut target = String::new();
let mut iter = str.char_indices().map(|(i, c)| (i as u32, c)); let mut iter = str.char_indices().map(|(i, c)| (i as u32, c));
while let Some((_, c)) = iter.next() { while let Some((_, c)) = iter.next() {
if c != '\\' { if c != '\\' {
target.push(c); target.push(c);
continue; continue;
} }
let (mut pos, code) = iter.next().expect("lexer would have continued"); let (mut pos, code) = iter.next().expect("lexer would have continued");
let next = match code { let next = match code {
c @ ('\\' | '"' | '$') => c, c @ ('\\' | '"' | '$') => c,
'b' => '\x08', 'b' => '\x08',
'f' => '\x0f', 'f' => '\x0f',
'n' => '\n', 'n' => '\n',
'r' => '\r', 'r' => '\r',
't' => '\t', 't' => '\t',
'\n' => 'skipws: loop { '\n' => 'skipws: loop {
match iter.next() { match iter.next() {
None => return Ok(target), None => return Ok(target),
Some((_, c)) => Some((_, c)) =>
if !c.is_whitespace() { if !c.is_whitespace() {
break 'skipws c; break 'skipws c;
}, },
} }
}, },
'u' => { 'u' => {
let acc = ((0..4).rev()) let acc = ((0..4).rev())
.map(|radical| { .map(|radical| {
let (j, c) = (iter.next()).ok_or(StringError { pos, kind: StringErrorKind::NotHex })?; let (j, c) = (iter.next()).ok_or(StringError { pos, kind: StringErrorKind::NotHex })?;
pos = j; pos = j;
let b = u32::from_str_radix(&String::from(c), 16) let b = u32::from_str_radix(&String::from(c), 16)
.map_err(|_| StringError { pos, kind: StringErrorKind::NotHex })?; .map_err(|_| StringError { pos, kind: StringErrorKind::NotHex })?;
Ok(16u32.pow(radical) + b) Ok(16u32.pow(radical) + b)
}) })
.fold_ok(0, u32::wrapping_add)?; .fold_ok(0, u32::wrapping_add)?;
char::from_u32(acc).ok_or(StringError { pos, kind: StringErrorKind::BadCodePoint })? char::from_u32(acc).ok_or(StringError { pos, kind: StringErrorKind::BadCodePoint })?
}, },
_ => return Err(StringError { pos, kind: StringErrorKind::BadEscSeq }), _ => return Err(StringError { pos, kind: StringErrorKind::BadEscSeq }),
}; };
target.push(next); target.push(next);
} }
Ok(target) Ok(target)
} }
#[derive(Default)] #[derive(Default)]
pub struct StringLexer; pub struct StringLexer;
impl Lexer for StringLexer { impl Lexer for StringLexer {
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"']; const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"'];
fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> { fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
let mut tail = all.strip_prefix('"').ok_or_else(err_not_applicable)?; let mut tail = all.strip_prefix('"').ok_or_else(err_not_applicable)?;
let mut ret = GenTok::X(IntStrAtom::from(intern!(str: "")).factory()).at(ctx.tok_ran(0, all)); let mut ret = GenTok::X(IntStrAtom::from(intern!(str: "")).factory()).at(ctx.tok_ran(0, all));
let mut cur = String::new(); let mut cur = String::new();
let mut errors = vec![]; let mut errors = vec![];
let str_to_gen = |str: &mut String, tail: &str, err: &mut Vec<OrcErr>| { let str_to_gen = |str: &mut String, tail: &str, err: &mut Vec<OrcErr>| {
let str_val = parse_string(&str.split_off(0)) let str_val = parse_string(&str.split_off(0))
.inspect_err(|e| err.push(e.clone().into_proj(ctx.pos(tail) - str.len() as u32))) .inspect_err(|e| err.push(e.clone().into_proj(ctx.pos(tail) - str.len() as u32)))
.unwrap_or_default(); .unwrap_or_default();
GenTok::X(IntStrAtom::from(intern(&*str_val)).factory()) GenTok::X(IntStrAtom::from(intern(&*str_val)).factory())
.at(ctx.tok_ran(str.len() as u32, tail)) .at(ctx.tok_ran(str.len() as u32, tail))
}; };
let add_frag = |prev: GenTokTree<'a>, new: GenTokTree<'a>| { let add_frag = |prev: GenTokTree<'a>, new: GenTokTree<'a>| {
wrap_tokv(vname_tv(&vname!(std::string::concat), new.range.end).chain([prev, new])) wrap_tokv(vname_tv(&vname!(std::string::concat), new.range.end).chain([prev, new]))
}; };
loop { loop {
if let Some(rest) = tail.strip_prefix('"') { if let Some(rest) = tail.strip_prefix('"') {
return Ok((rest, add_frag(ret, str_to_gen(&mut cur, tail, &mut errors)))); return Ok((rest, add_frag(ret, str_to_gen(&mut cur, tail, &mut errors))));
} else if let Some(rest) = tail.strip_prefix('$') { } else if let Some(rest) = tail.strip_prefix('$') {
ret = add_frag(ret, str_to_gen(&mut cur, tail, &mut errors)); ret = add_frag(ret, str_to_gen(&mut cur, tail, &mut errors));
let (new_tail, tree) = ctx.recurse(rest)?; let (new_tail, tree) = ctx.recurse(rest)?;
tail = new_tail; tail = new_tail;
ret = add_frag(ret, tree); ret = add_frag(ret, tree);
} else if tail.starts_with('\\') { } else if tail.starts_with('\\') {
// parse_string will deal with it, we just have to skip the next char // parse_string will deal with it, we just have to skip the next char
tail = &tail[2..]; tail = &tail[2..];
} else { } else {
let mut ch = tail.chars(); let mut ch = tail.chars();
if let Some(c) = ch.next() { if let Some(c) = ch.next() {
cur.push(c); cur.push(c);
tail = ch.as_str(); tail = ch.as_str();
} else { } else {
let range = ctx.pos(all)..ctx.pos(""); let range = ctx.pos(all)..ctx.pos("");
return Err(mk_errv(intern!(str: "No string end"), "String never terminated with \"", [ return Err(mk_errv(intern!(str: "No string end"), "String never terminated with \"", [
Pos::Range(range.clone()).into(), Pos::Range(range.clone()).into(),
])); ]));
} }
} }
} }
} }
} }

View File

@@ -23,9 +23,9 @@
"editor.glyphMargin": false, "editor.glyphMargin": false,
"editor.rulers": [], "editor.rulers": [],
"editor.guides.indentation": false, "editor.guides.indentation": false,
"editor.formatOnSave": true,
"editor.formatOnType": true, "editor.formatOnType": true,
}, },
"editor.formatOnSave": true,
"rust-analyzer.showUnlinkedFileNotification": false, "rust-analyzer.showUnlinkedFileNotification": false,
"rust-analyzer.checkOnSave": true, "rust-analyzer.checkOnSave": true,
"rust-analyzer.check.command": "clippy", "rust-analyzer.check.command": "clippy",

View File

@@ -6,8 +6,8 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
camino = "1.1.7" camino = "1.1.9"
clap = { version = "=4.5.4", features = ["derive"] } clap = { version = "4.5.24", features = ["derive"] }
itertools = "0.13.0" itertools = "0.14.0"
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
orchid-host = { version = "0.1.0", path = "../orchid-host" } orchid-host = { version = "0.1.0", path = "../orchid-host" }

View File

@@ -9,44 +9,44 @@ use itertools::Itertools;
use orchid_base::interner::intern; use orchid_base::interner::intern;
use orchid_base::logging::{LogStrategy, Logger}; use orchid_base::logging::{LogStrategy, Logger};
use orchid_base::tree::ttv_fmt; use orchid_base::tree::ttv_fmt;
use orchid_host::extension::{init_systems, Extension}; use orchid_host::extension::{Extension, init_systems};
use orchid_host::lex::lex; use orchid_host::lex::lex;
use orchid_host::subprocess::Subprocess; use orchid_host::subprocess::Subprocess;
#[derive(Parser, Debug)] #[derive(Parser, Debug)]
#[command(version, about, long_about)] #[command(version, about, long_about)]
pub struct Args { pub struct Args {
#[arg(short, long)] #[arg(short, long)]
extension: Vec<Utf8PathBuf>, extension: Vec<Utf8PathBuf>,
#[arg(short, long)] #[arg(short, long)]
system: Vec<String>, system: Vec<String>,
#[command(subcommand)] #[command(subcommand)]
command: Commands, command: Commands,
} }
#[derive(Subcommand, Debug)] #[derive(Subcommand, Debug)]
pub enum Commands { pub enum Commands {
Lex { Lex {
#[arg(short, long)] #[arg(short, long)]
file: Utf8PathBuf, file: Utf8PathBuf,
}, },
} }
fn main() { fn main() {
let args = Args::parse(); let args = Args::parse();
let logger = Logger::new(LogStrategy::StdErr); let logger = Logger::new(LogStrategy::StdErr);
match args.command { match args.command {
Commands::Lex { file } => { Commands::Lex { file } => {
let extensions = (args.extension.iter()) let extensions = (args.extension.iter())
.map(|f| Subprocess::new(Command::new(f.as_os_str()), logger.clone()).unwrap()) .map(|f| Subprocess::new(Command::new(f.as_os_str()), logger.clone()).unwrap())
.map(|cmd| Extension::new_process(Arc::new(cmd), logger.clone()).unwrap()) .map(|cmd| Extension::new_process(Arc::new(cmd), logger.clone()).unwrap())
.collect_vec(); .collect_vec();
let systems = init_systems(&args.system, &extensions).unwrap(); let systems = init_systems(&args.system, &extensions).unwrap();
let mut file = File::open(file.as_std_path()).unwrap(); let mut file = File::open(file.as_std_path()).unwrap();
let mut buf = String::new(); let mut buf = String::new();
file.read_to_string(&mut buf).unwrap(); file.read_to_string(&mut buf).unwrap();
let lexemes = lex(intern(&buf), &systems).unwrap(); let lexemes = lex(intern(&buf), &systems).unwrap();
println!("{}", ttv_fmt(&lexemes)) println!("{}", ttv_fmt(&lexemes))
}, },
} }
} }

View File

@@ -1,16 +1,20 @@
# meta # meta
format_code_in_doc_comments = true format_code_in_doc_comments = true
unstable_features = true unstable_features = true
version = "Two" style_edition = "2024"
# space # space
tab_spaces = 2 tab_spaces = 2
hard_tabs = true
max_width = 100 max_width = 100
error_on_line_overflow = true error_on_line_overflow = true
error_on_unformatted = true
format_macro_matchers = true format_macro_matchers = true
newline_style = "Unix" newline_style = "Unix"
normalize_comments = true normalize_comments = true
wrap_comments = true wrap_comments = true
comment_width = 80
doc_comment_code_block_width = 80
overflow_delimited_expr = true overflow_delimited_expr = true
use_small_heuristics = "Max" use_small_heuristics = "Max"
fn_single_line = true fn_single_line = true

View File

@@ -1,45 +1,45 @@
use std::env::{self, args}; use std::env::{self, args};
use std::io::{stdin, BufRead, BufReader, Write}; use std::io::{BufRead, BufReader, Write, stdin};
use std::process; use std::process;
use std::time::SystemTime; use std::time::SystemTime;
fn main() { fn main() {
let is_child = env::args().any(|arg| arg == "child"); let is_child = env::args().any(|arg| arg == "child");
if is_child { if is_child {
loop { loop {
let mut input = String::new(); let mut input = String::new();
stdin().read_line(&mut input).unwrap(); stdin().read_line(&mut input).unwrap();
if input == "ping\n" { if input == "ping\n" {
println!("pong"); println!("pong");
} else if input == "\n" { } else if input == "\n" {
process::exit(0); process::exit(0);
} else { } else {
panic!("Unrecognized input {input:?}"); panic!("Unrecognized input {input:?}");
} }
} }
} else { } else {
let steps = 1_000_000; let steps = 1_000_000;
let mut child = process::Command::new(args().next().unwrap()) let mut child = process::Command::new(args().next().unwrap())
.arg("child") .arg("child")
.stdin(process::Stdio::piped()) .stdin(process::Stdio::piped())
.stdout(process::Stdio::piped()) .stdout(process::Stdio::piped())
.spawn() .spawn()
.unwrap(); .unwrap();
let mut bufr = BufReader::new(child.stdout.take().unwrap()); let mut bufr = BufReader::new(child.stdout.take().unwrap());
let mut child_stdin = child.stdin.take().unwrap(); let mut child_stdin = child.stdin.take().unwrap();
let time = SystemTime::now(); let time = SystemTime::now();
for _ in 0..steps { for _ in 0..steps {
writeln!(child_stdin, "ping").unwrap(); writeln!(child_stdin, "ping").unwrap();
let mut buf = String::new(); let mut buf = String::new();
bufr.read_line(&mut buf).unwrap(); bufr.read_line(&mut buf).unwrap();
if buf != "pong\n" { if buf != "pong\n" {
panic!("Unrecognized output {buf:?}") panic!("Unrecognized output {buf:?}")
} }
} }
writeln!(child_stdin).unwrap(); writeln!(child_stdin).unwrap();
child.wait().unwrap(); child.wait().unwrap();
let elapsed = time.elapsed().unwrap(); let elapsed = time.elapsed().unwrap();
let avg = elapsed / steps; let avg = elapsed / steps;
println!("A roundtrip takes {avg:?}, {}ms on average", (avg.as_nanos() as f64) / 1_000_000f64); println!("A roundtrip takes {avg:?}, {}ms on average", (avg.as_nanos() as f64) / 1_000_000f64);
} }
} }

View File

@@ -4,4 +4,4 @@ version = "0.1.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
clap = { version = "=4.5.4", features = ["derive"] } clap = { version = "4.5.24", features = ["derive"] }

View File

@@ -10,59 +10,59 @@ use clap::{Parser, Subcommand};
#[derive(Parser)] #[derive(Parser)]
pub struct Args { pub struct Args {
#[arg(short, long)] #[arg(short, long)]
verbose: bool, verbose: bool,
#[command(subcommand)] #[command(subcommand)]
command: Commands, command: Commands,
} }
#[derive(Subcommand)] #[derive(Subcommand)]
pub enum Commands { pub enum Commands {
CheckApiRefs, CheckApiRefs,
} }
pub static EXIT_OK: AtomicBool = AtomicBool::new(true); pub static EXIT_OK: AtomicBool = AtomicBool::new(true);
fn main() -> io::Result<ExitCode> { fn main() -> io::Result<ExitCode> {
let args = Args::parse(); let args = Args::parse();
match args.command { match args.command {
Commands::CheckApiRefs => walk_wsp(&mut |_| Ok(true), &mut |file| { Commands::CheckApiRefs => walk_wsp(&mut |_| Ok(true), &mut |file| {
if file.path().extension() == Some(OsStr::new("rs")) && file.file_name() != "lib.rs" { if file.path().extension() == Some(OsStr::new("rs")) && file.file_name() != "lib.rs" {
let mut contents = String::new(); let mut contents = String::new();
File::open(file.path())?.read_to_string(&mut contents)?; File::open(file.path())?.read_to_string(&mut contents)?;
for (l, line) in contents.lines().enumerate() { for (l, line) in contents.lines().enumerate() {
if line.trim().starts_with("use") { if line.trim().starts_with("use") {
if let Some(c) = line.find("orchid_api") { if let Some(c) = line.find("orchid_api") {
if Some(c) != line.find("orchid_api_") { if Some(c) != line.find("orchid_api_") {
let dname = file.path().to_string_lossy().to_string(); let dname = file.path().to_string_lossy().to_string();
eprintln!("orchid_api imported in {dname} at {};{}", l + 1, c + 1) eprintln!("orchid_api imported in {dname} at {};{}", l + 1, c + 1)
} }
} }
} }
} }
} }
Ok(()) Ok(())
})?, })?,
} }
Ok(if EXIT_OK.load(Ordering::Relaxed) { ExitCode::SUCCESS } else { ExitCode::FAILURE }) Ok(if EXIT_OK.load(Ordering::Relaxed) { ExitCode::SUCCESS } else { ExitCode::FAILURE })
} }
fn walk_wsp( fn walk_wsp(
dir_filter: &mut impl FnMut(&DirEntry) -> io::Result<bool>, dir_filter: &mut impl FnMut(&DirEntry) -> io::Result<bool>,
file_handler: &mut impl FnMut(DirEntry) -> io::Result<()>, file_handler: &mut impl FnMut(DirEntry) -> io::Result<()>,
) -> io::Result<()> { ) -> io::Result<()> {
return recurse(&env::current_dir()?, dir_filter, file_handler); return recurse(&env::current_dir()?, dir_filter, file_handler);
fn recurse( fn recurse(
dir: &Path, dir: &Path,
dir_filter: &mut impl FnMut(&DirEntry) -> io::Result<bool>, dir_filter: &mut impl FnMut(&DirEntry) -> io::Result<bool>,
file_handler: &mut impl FnMut(DirEntry) -> io::Result<()>, file_handler: &mut impl FnMut(DirEntry) -> io::Result<()>,
) -> io::Result<()> { ) -> io::Result<()> {
for file in dir.read_dir()?.collect::<Result<Vec<_>, _>>()? { for file in dir.read_dir()?.collect::<Result<Vec<_>, _>>()? {
if file.metadata()?.is_dir() && dir_filter(&file)? { if file.metadata()?.is_dir() && dir_filter(&file)? {
recurse(&file.path(), dir_filter, file_handler)?; recurse(&file.path(), dir_filter, file_handler)?;
} }
file_handler(file)?; file_handler(file)?;
} }
Ok(()) Ok(())
} }
} }