Updated everything and moved to hard tab indentation

2025-01-08 19:20:34 +01:00
parent 7cdfe7e3c4
commit 52c8d1c95a
100 changed files with 5949 additions and 5998 deletions

View File

@@ -8,7 +8,7 @@ root = true
end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_style = space
indent_style = tab
indent_size = 2
[Makefile]

Cargo.lock (generated), 426 lines changed
View File

@@ -28,15 +28,15 @@ dependencies = [
[[package]]
name = "allocator-api2"
version = "0.2.16"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "anstream"
version = "0.6.15"
version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526"
checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
dependencies = [
"anstyle",
"anstyle-parse",
@@ -49,33 +49,33 @@ dependencies = [
[[package]]
name = "anstyle"
version = "1.0.8"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
[[package]]
name = "anstyle-parse"
version = "0.2.5"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb"
checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.1"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a"
checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
dependencies = [
"windows-sys",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.4"
version = "3.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8"
checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125"
dependencies = [
"anstyle",
"windows-sys",
@@ -83,15 +83,15 @@ dependencies = [
[[package]]
name = "arrayvec"
version = "0.7.4"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "autocfg"
version = "1.1.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "bitvec"
@@ -116,9 +116,9 @@ dependencies = [
[[package]]
name = "borsh"
version = "1.5.1"
version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed"
checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03"
dependencies = [
"borsh-derive",
"cfg_aliases",
@@ -126,16 +126,15 @@ dependencies = [
[[package]]
name = "borsh-derive"
version = "1.5.1"
version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b"
checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244"
dependencies = [
"once_cell",
"proc-macro-crate",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.52",
"syn_derive",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 2.0.95",
]
[[package]]
@@ -155,22 +154,28 @@ version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 1.0.109",
]
[[package]]
name = "bytes"
version = "1.6.1"
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
[[package]]
name = "camino"
version = "1.1.7"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239"
checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
[[package]]
name = "cfg-if"
@@ -186,9 +191,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "clap"
version = "4.5.4"
version = "4.5.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0"
checksum = "9560b07a799281c7e0958b9296854d6fafd4c5f31444a7e5bb1ad6dde5ccf1bd"
dependencies = [
"clap_builder",
"clap_derive",
@@ -196,51 +201,51 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.2"
version = "4.5.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4"
checksum = "874e0dd3eb68bf99058751ac9712f622e61e6f393a94f7128fa26e3f02f5c7cd"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim 0.11.1",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.5.4"
version = "4.5.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64"
checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c"
dependencies = [
"heck",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.52",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 2.0.95",
]
[[package]]
name = "clap_lex"
version = "0.7.2"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "colorchoice"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]]
name = "const_panic"
version = "0.2.8"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6051f239ecec86fde3410901ab7860d458d160371533842974fc61f96d15879b"
checksum = "53857514f72ee4a2b583de67401e3ff63a5472ca4acf289d09a9ea7636dfec17"
[[package]]
name = "cpufeatures"
version = "0.2.12"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3"
dependencies = [
"libc",
]
@@ -257,9 +262,9 @@ dependencies = [
[[package]]
name = "darling"
version = "0.20.8"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391"
checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989"
dependencies = [
"darling_core",
"darling_macro",
@@ -267,27 +272,27 @@ dependencies = [
[[package]]
name = "darling_core"
version = "0.20.8"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f"
checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5"
dependencies = [
"fnv",
"ident_case",
"proc-macro2 1.0.78",
"quote 1.0.35",
"strsim 0.10.0",
"syn 2.0.52",
"proc-macro2 1.0.92",
"quote 1.0.38",
"strsim",
"syn 2.0.95",
]
[[package]]
name = "darling_macro"
version = "0.20.8"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f"
checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [
"darling_core",
"quote 1.0.35",
"syn 2.0.52",
"quote 1.0.38",
"syn 2.0.95",
]
[[package]]
@@ -319,9 +324,9 @@ checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125"
[[package]]
name = "either"
version = "1.10.0"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a"
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
[[package]]
name = "equivalent"
@@ -359,9 +364,9 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.2.14"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
dependencies = [
"cfg-if",
"libc",
@@ -377,12 +382,6 @@ dependencies = [
"ahash 0.7.8",
]
[[package]]
name = "hashbrown"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
[[package]]
name = "hashbrown"
version = "0.15.2"
@@ -408,12 +407,12 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "indexmap"
version = "2.2.6"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f"
dependencies = [
"equivalent",
"hashbrown 0.14.5",
"hashbrown 0.15.2",
]
[[package]]
@@ -422,15 +421,6 @@ version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.14.0"
@@ -442,15 +432,15 @@ dependencies = [
[[package]]
name = "itoa"
version = "1.0.11"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
[[package]]
name = "konst"
version = "0.3.9"
version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50a0ba6de5f7af397afff922f22c149ff605c766cd3269cf6c1cd5e466dbe3b9"
checksum = "4381b9b00c55f251f2ebe9473aef7c117e96828def1a7cb3bd3f0f903c6894e9"
dependencies = [
"const_panic",
"konst_kernel",
@@ -460,18 +450,18 @@ dependencies = [
[[package]]
name = "konst_kernel"
version = "0.3.9"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be0a455a1719220fd6adf756088e1c69a85bf14b6a9e24537a5cc04f503edb2b"
checksum = "e4b1eb7788f3824c629b1116a7a9060d6e898c358ebff59070093d51103dcc3c"
dependencies = [
"typewit",
]
[[package]]
name = "konst_proc_macros"
version = "0.3.0"
version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e28ab1dc35e09d60c2b8c90d12a9a8d9666c876c10a3739a3196db0103b6043"
checksum = "00af7901ba50898c9e545c24d5c580c96a982298134e8037d8978b6594782c07"
[[package]]
name = "lazy_static"
@@ -481,9 +471,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.153"
version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]]
name = "memchr"
@@ -508,9 +498,9 @@ dependencies = [
[[package]]
name = "once_cell"
version = "1.19.0"
version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "orchid-api"
@@ -526,18 +516,18 @@ name = "orchid-api-derive"
version = "0.1.0"
dependencies = [
"darling",
"itertools 0.13.0",
"itertools",
"orchid-api-traits",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.52",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 2.0.95",
]
[[package]]
name = "orchid-api-traits"
version = "0.1.0"
dependencies = [
"itertools 0.13.0",
"itertools",
"never",
"ordered-float",
]
@@ -549,7 +539,7 @@ dependencies = [
"derive_destructure",
"dyn-clone",
"hashbrown 0.15.2",
"itertools 0.14.0",
"itertools",
"lazy_static",
"never",
"num-traits",
@@ -571,7 +561,7 @@ dependencies = [
"derive_destructure",
"dyn-clone",
"hashbrown 0.15.2",
"itertools 0.14.0",
"itertools",
"konst",
"lazy_static",
"never",
@@ -592,7 +582,7 @@ version = "0.1.0"
dependencies = [
"derive_destructure",
"hashbrown 0.15.2",
"itertools 0.14.0",
"itertools",
"lazy_static",
"never",
"num-traits",
@@ -609,7 +599,7 @@ dependencies = [
name = "orchid-std"
version = "0.1.0"
dependencies = [
"itertools 0.13.0",
"itertools",
"never",
"once_cell",
"orchid-api",
@@ -626,16 +616,16 @@ version = "0.1.0"
dependencies = [
"camino",
"clap",
"itertools 0.13.0",
"itertools",
"orchid-base",
"orchid-host",
]
[[package]]
name = "ordered-float"
version = "4.2.1"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19ff2cf528c6c03d9ed653d6c4ce1dc0582dc4af309790ad92f07c1cd551b0be"
checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951"
dependencies = [
"num-traits",
]
@@ -648,42 +638,22 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "ppv-lite86"
version = "0.2.17"
version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
dependencies = [
"zerocopy",
]
[[package]]
name = "proc-macro-crate"
version = "3.1.0"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284"
checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b"
dependencies = [
"toml_edit",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2 1.0.78",
"quote 1.0.35",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "0.4.30"
@@ -695,9 +665,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.78"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
dependencies = [
"unicode-ident",
]
@@ -717,8 +687,8 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 1.0.109",
]
@@ -733,11 +703,11 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.35"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
dependencies = [
"proc-macro2 1.0.78",
"proc-macro2 1.0.92",
]
[[package]]
@@ -787,9 +757,9 @@ dependencies = [
[[package]]
name = "rkyv"
version = "0.7.44"
version = "0.7.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0"
checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b"
dependencies = [
"bitvec",
"bytecheck",
@@ -805,20 +775,20 @@ dependencies = [
[[package]]
name = "rkyv_derive"
version = "0.7.44"
version = "0.7.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65"
checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 1.0.109",
]
[[package]]
name = "rust-embed"
version = "8.3.0"
version = "8.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb78f46d0066053d16d4ca7b898e9343bc3530f71c61d5ad84cd404ada068745"
checksum = "fa66af4a4fdd5e7ebc276f115e895611a34739a9c1c01028383d612d550953c0"
dependencies = [
"rust-embed-impl",
"rust-embed-utils",
@@ -827,22 +797,22 @@ dependencies = [
[[package]]
name = "rust-embed-impl"
version = "8.3.0"
version = "8.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b91ac2a3c6c0520a3fb3dd89321177c3c692937c4eb21893378219da10c44fc8"
checksum = "6125dbc8867951125eec87294137f4e9c2c96566e61bf72c45095a7c77761478"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"proc-macro2 1.0.92",
"quote 1.0.38",
"rust-embed-utils",
"syn 2.0.52",
"syn 2.0.95",
"walkdir",
]
[[package]]
name = "rust-embed-utils"
version = "8.3.0"
version = "8.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86f69089032567ffff4eada41c573fc43ff466c7db7c5688b2e7969584345581"
checksum = "2e5347777e9aacb56039b0e1f28785929a8a3b709e87482e7442c72e7c12529d"
dependencies = [
"sha2",
"walkdir",
@@ -850,9 +820,9 @@ dependencies = [
[[package]]
name = "rust_decimal"
version = "1.35.0"
version = "1.36.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1790d1c4c0ca81211399e0e0af16333276f375209e71a37b67698a373db5b47a"
checksum = "b082d80e3e3cc52b2ed634388d436fe1f4de6af5786cc2de9ba9737527bdf555"
dependencies = [
"arrayvec",
"borsh",
@@ -887,31 +857,32 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "serde"
version = "1.0.204"
version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.204"
version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.52",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 2.0.95",
]
[[package]]
name = "serde_json"
version = "1.0.120"
version = "1.0.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5"
checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
@@ -929,20 +900,14 @@ dependencies = [
[[package]]
name = "simdutf8"
version = "0.1.4"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a"
checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
[[package]]
name = "stdio-perftest"
version = "0.1.0"
[[package]]
name = "strsim"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "strsim"
version = "0.11.1"
@@ -972,34 +937,22 @@ version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"proc-macro2 1.0.92",
"quote 1.0.38",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.52"
version = "2.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07"
checksum = "46f71c0377baf4ef1cc3e3402ded576dccc315800fbc62dfc7fe04b009773b4a"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"proc-macro2 1.0.92",
"quote 1.0.38",
"unicode-ident",
]
[[package]]
name = "syn_derive"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b"
dependencies = [
"proc-macro-error",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.52",
]
[[package]]
name = "tap"
version = "1.0.1"
@@ -1008,9 +961,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tinyvec"
version = "1.8.0"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938"
checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8"
dependencies = [
"tinyvec_macros",
]
@@ -1023,15 +976,15 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "toml_datetime"
version = "0.6.6"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf"
checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
[[package]]
name = "toml_edit"
version = "0.21.1"
version = "0.22.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1"
checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5"
dependencies = [
"indexmap",
"toml_datetime",
@@ -1044,8 +997,8 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b79e2e9c9ab44c6d7c20d5976961b47e8f49ac199154daa514b77cd1ab536625"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 1.0.109",
]
@@ -1057,9 +1010,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "typewit"
version = "1.9.0"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6fb9ae6a3cafaf0a5d14c2302ca525f9ae8e07a0f0e6949de88d882c37a6e24"
checksum = "cb77c29baba9e4d3a6182d51fa75e3215c7fd1dab8f4ea9d107c716878e55fc0"
dependencies = [
"typewit_proc_macros",
]
@@ -1072,9 +1025,9 @@ checksum = "e36a83ea2b3c704935a01b4642946aadd445cea40b10935e3f8bd8052b8193d6"
[[package]]
name = "unicode-ident"
version = "1.0.12"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
[[package]]
name = "unicode-xid"
@@ -1090,21 +1043,21 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.10.0"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314"
checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a"
[[package]]
name = "version_check"
version = "0.9.4"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "walkdir"
version = "2.4.0"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
"same-file",
"winapi-util",
@@ -1116,42 +1069,20 @@ version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.6"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"winapi",
"windows-sys",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.52.0"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
@@ -1222,9 +1153,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
version = "0.5.40"
version = "0.6.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876"
checksum = "39281189af81c07ec09db316b302a3e67bf9bd7cbf6c820b50e35fee9c2fa980"
dependencies = [
"memchr",
]
@@ -1247,20 +1178,21 @@ dependencies = [
[[package]]
name = "zerocopy"
version = "0.7.32"
version = "0.7.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
dependencies = [
"byteorder",
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.32"
version = "0.7.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.52",
"proc-macro2 1.0.92",
"quote 1.0.38",
"syn 2.0.95",
]

View File

@@ -9,9 +9,9 @@ proc-macro = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
quote = "1.0.35"
syn = { version = "2.0.52" }
quote = "1.0.38"
syn = { version = "2.0.95" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
proc-macro2 = "1.0.78"
darling = "0.20.8"
itertools = "0.13.0"
proc-macro2 = "1.0.92"
darling = "0.20.10"
itertools = "0.14.0"

View File

@@ -3,28 +3,28 @@ use quote::ToTokens;
use syn::spanned::Spanned;
pub fn add_trait_bounds(mut generics: syn::Generics, bound: syn::TypeParamBound) -> syn::Generics {
for param in &mut generics.params {
if let syn::GenericParam::Type(ref mut type_param) = *param {
type_param.bounds.push(bound.clone())
}
}
generics
}
pub fn destructure(fields: &syn::Fields) -> Option<pm2::TokenStream> {
match fields {
syn::Fields::Unit => None,
syn::Fields::Named(_) => {
let field_list = fields.iter().map(|f| f.ident.as_ref().unwrap());
Some(quote! { { #(#field_list),* } })
},
syn::Fields::Unnamed(un) => {
let field_list = (0..fields.len()).map(|i| pos_field_name(i, un.span()));
Some(quote! { ( #(#field_list),* ) })
},
}
}
pub fn pos_field_name(i: usize, span: pm2::Span) -> pm2::TokenStream {
syn::Ident::new(&format!("field_{i}"), span).to_token_stream()
}
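As a sanity check on the helpers above, a short test sketch (written as if appended to this module; the sample field lists are hypothetical): named fields are reused by name, unnamed fields become positional field_N bindings.

#[test]
fn destructure_shapes() {
	use quote::quote;
	use syn::parse_quote;

	let named = syn::Fields::Named(parse_quote!({ a: u8, b: String }));
	let unnamed = syn::Fields::Unnamed(parse_quote!((u8, String)));
	// destructure should yield a brace pattern for named fields...
	assert_eq!(destructure(&named).unwrap().to_string(), quote!({ a, b }).to_string());
	// ...and parenthesised field_0, field_1, ... bindings for tuple fields.
	assert_eq!(destructure(&unnamed).unwrap().to_string(), quote!((field_0, field_1)).to_string());
}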

View File

@@ -4,53 +4,53 @@ use proc_macro2 as pm2;
use crate::common::add_trait_bounds;
pub fn derive(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree
let input = parse_macro_input!(input as syn::DeriveInput);
let generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Decode));
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let name = input.ident;
let decode = decode_body(&input.data);
let expanded = quote! {
impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause {
fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self { #decode }
}
};
TokenStream::from(expanded)
}
fn decode_fields(fields: &syn::Fields) -> pm2::TokenStream {
match fields {
syn::Fields::Unit => quote! {},
syn::Fields::Named(_) => {
let names = fields.iter().map(|f| f.ident.as_ref().unwrap());
quote! { { #( #names: orchid_api_traits::Decode::decode(read), )* } }
},
syn::Fields::Unnamed(_) => {
let exprs = fields.iter().map(|_| quote! { orchid_api_traits::Decode::decode(read), });
quote! { ( #( #exprs )* ) }
},
}
}
fn decode_body(data: &syn::Data) -> proc_macro2::TokenStream {
match data {
syn::Data::Union(_) => panic!("Unions can't be deserialized"),
syn::Data::Struct(str) => {
let fields = decode_fields(&str.fields);
quote! { Self #fields }
},
syn::Data::Enum(en) => {
let opts = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| {
let fields = decode_fields(&v.fields);
let id = i as u8;
quote! { #id => Self::#ident #fields, }
});
quote! {
match <u8 as orchid_api_traits::Decode>::decode(read) {
#(#opts)*
x => panic!("Unrecognized enum kind {x}")
}
}
},
}
}
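Roughly what this derive expands to for a small enum (hand-written equivalent; the Msg type is hypothetical, the trait path is the one emitted above): the first byte selects the variant, then each field is decoded in order.

enum Msg { Ping, Text(String) }

impl orchid_api_traits::Decode for Msg {
	fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self {
		match <u8 as orchid_api_traits::Decode>::decode(read) {
			0 => Self::Ping,
			1 => Self::Text(orchid_api_traits::Decode::decode(read)),
			x => panic!("Unrecognized enum kind {x}"),
		}
	}
}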

View File

@@ -6,61 +6,61 @@ use syn::spanned::Spanned;
use crate::common::{add_trait_bounds, destructure, pos_field_name};
pub fn derive(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree
let input = parse_macro_input!(input as syn::DeriveInput);
let e_generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Encode));
let (e_impl_generics, e_ty_generics, e_where_clause) = e_generics.split_for_impl();
let name = input.ident;
let encode = encode_body(&input.data);
let expanded = quote! {
impl #e_impl_generics orchid_api_traits::Encode for #name #e_ty_generics #e_where_clause {
fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) { #encode }
}
};
TokenStream::from(expanded)
}
fn encode_body(data: &syn::Data) -> Option<pm2::TokenStream> {
match data {
syn::Data::Union(_) => panic!("Unions can't be deserialized"),
syn::Data::Struct(str) => {
let dest = destructure(&str.fields)?;
let body = encode_items(&str.fields);
Some(quote! {
let Self #dest = &self;
#body
})
},
syn::Data::Enum(en) => {
let options = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| {
let dest = destructure(&v.fields).unwrap_or_default();
let body = encode_items(&v.fields);
quote! {
Self::#ident #dest => {
(#i as u8).encode(write);
#body
}
}
});
Some(quote! {
match self {
#(#options)*
_ => unreachable!("Autogenerated encode impl for all possible variants"),
}
})
},
}
}
fn encode_names<T: ToTokens>(names: impl Iterator<Item = T>) -> pm2::TokenStream {
quote! { #( #names .encode(write); )* }
}
fn encode_items(fields: &syn::Fields) -> Option<pm2::TokenStream> {
match fields {
syn::Fields::Unit => None,
syn::Fields::Named(_) => Some(encode_names(fields.iter().map(|f| f.ident.as_ref().unwrap()))),
syn::Fields::Unnamed(un) =>
Some(encode_names((0..fields.len()).map(|i| pos_field_name(i, un.span())))),
}
}
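For symmetry, a hand-written equivalent of the encode expansion for a named-field struct (hypothetical type): the struct is destructured with the pattern produced by destructure, then each field writes itself in declaration order; enum variants additionally write their index as a u8 tag first.

use orchid_api_traits::Encode;

struct Header { version: u8, name: String }

impl Encode for Header {
	fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) {
		let Self { version, name } = &self;
		version.encode(write);
		name.encode(write);
	}
}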

View File

@@ -7,118 +7,118 @@ use proc_macro2 as pm2;
use syn::DeriveInput;
pub fn derive(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree
let input = parse_macro_input!(input as syn::DeriveInput);
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
let name = &input.ident;
let extendable = is_extendable(&input);
let is_leaf_val = if extendable { quote!(TLFalse) } else { quote!(TLTrue) };
match get_ancestry(&input) {
None => TokenStream::from(quote! {
impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause {
type IsRoot = orchid_api_traits::TLTrue;
type IsLeaf = orchid_api_traits:: #is_leaf_val ;
}
}),
Some(ancestry) => {
let parent = ancestry[0].clone();
let casts = gen_casts(&ancestry[..], &quote!(#name));
TokenStream::from(quote! {
#casts
impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause {
type IsRoot = orchid_api_traits::TLFalse;
type IsLeaf = orchid_api_traits:: #is_leaf_val ;
}
impl #impl_generics orchid_api_traits::Extends for #name #ty_generics #where_clause {
type Parent = #parent;
}
})
},
}
}
fn gen_casts(ancestry: &[pm2::TokenStream], this: &pm2::TokenStream) -> pm2::TokenStream {
let from_impls = iter::once(this).chain(ancestry.iter()).tuple_windows().map(|(prev, cur)| {
quote! {
impl From<#this> for #cur {
fn from(value: #this) -> Self {
#cur::#prev(value.into())
}
}
}
});
let try_from_impls = (1..=ancestry.len()).map(|len| {
let (orig, inter) = ancestry[..len].split_last().unwrap();
fn gen_chk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream {
match r.split_last() {
None => quote! { #last (_) => true },
Some((ty, tail)) => {
let sub = gen_chk(tail, last);
quote! {
#ty ( value ) => match value {
#ty:: #sub ,
_ => false
}
}
},
}
}
let chk = gen_chk(inter, this);
fn gen_unpk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream {
match r.split_last() {
None => quote! { #last ( value ) => value },
Some((ty, tail)) => {
let sub = gen_unpk(tail, last);
quote! {
#ty ( value ) => match value {
#ty:: #sub ,
_ => unreachable!("Checked above!"),
}
}
},
}
}
let unpk = gen_unpk(inter, this);
quote! {
impl TryFrom<#orig> for #this {
type Error = #orig;
fn try_from(value: #orig) -> Result<Self, Self::Error> {
let can_cast = match &value {
#orig:: #chk ,
_ => false
};
if !can_cast { return Err(value) }
Ok ( match value {
#orig:: #unpk ,
_ => unreachable!("Checked above!")
} )
}
}
}
});
from_impls.chain(try_from_impls).flatten().collect()
}
fn get_ancestry(input: &DeriveInput) -> Option<Vec<pm2::TokenStream>> {
input.attrs.iter().find(|a| a.path().get_ident().is_some_and(|i| *i == "extends")).map(|attr| {
match &attr.meta {
syn::Meta::List(list) => (list.tokens.clone().into_iter())
.batching(|it| {
let grp: pm2::TokenStream =
it.take_while(|t| {
if let TokenTree::Punct(punct) = t { punct.as_char() != ',' } else { true }
})
.collect();
(!grp.is_empty()).then_some(grp)
})
.collect(),
_ => panic!("The correct format of the parent macro is #[parent(SomeParentType)]"),
}
})
}
fn is_extendable(input: &DeriveInput) -> bool {
input.attrs.iter().any(|a| a.path().get_ident().is_some_and(|i| *i == "extendable"))
}
#[test]
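To make gen_casts concrete: for a leaf whose declared ancestry is [CharNode, Root] (hypothetical types, not taken from the diff), the generated conversions are equivalent to the following; one TryFrom impl is emitted per ancestor.

struct CharLeaf;
enum CharNode { CharLeaf(CharLeaf), Other }
enum Root { CharNode(CharNode), Other }

impl From<CharLeaf> for CharNode {
	fn from(value: CharLeaf) -> Self { CharNode::CharLeaf(value.into()) }
}
impl From<CharLeaf> for Root {
	fn from(value: CharLeaf) -> Self { Root::CharNode(value.into()) }
}
impl TryFrom<Root> for CharLeaf {
	type Error = Root;
	fn try_from(value: Root) -> Result<Self, Self::Error> {
		// First walk the path by reference to see whether the cast can succeed...
		let can_cast = match &value {
			Root::CharNode(value) => match value { CharNode::CharLeaf(_) => true, _ => false },
			_ => false,
		};
		if !can_cast { return Err(value) }
		// ...then unpack by value, knowing every layer matches.
		Ok(match value {
			Root::CharNode(value) => match value {
				CharNode::CharLeaf(value) => value,
				_ => unreachable!("Checked above!"),
			},
			_ => unreachable!("Checked above!"),
		})
	}
}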

View File

@@ -23,5 +23,5 @@ pub fn hierarchy(input: TokenStream) -> TokenStream { hierarchy::derive(input) }
#[proc_macro_derive(Coding)]
pub fn coding(input: TokenStream) -> TokenStream {
decode(input.clone()).into_iter().chain(encode(input)).collect()
}
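In use, the combined derive is one attribute (crate path assumed from the package name; the example type is made up): deriving Coding emits both the Decode and the Encode impl, since coding() simply chains the two expansions.

use orchid_api_derive::Coding;

#[derive(Clone, Coding)]
struct Greeting {
	id: u64,
	text: String,
}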

View File

@@ -6,6 +6,6 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
itertools = "0.13.0"
itertools = "0.14.0"
never = "0.1.0"
ordered-float = "4.2"
ordered-float = "4.6.0"

View File

@@ -13,36 +13,36 @@ use ordered_float::NotNan;
use crate::encode_enum;
pub trait Decode {
/// Decode an instance from the beginning of the buffer. Return the decoded
/// data and the remaining buffer.
fn decode<R: Read + ?Sized>(read: &mut R) -> Self;
}
pub trait Encode {
/// Append an instance of the struct to the buffer
fn encode<W: Write + ?Sized>(&self, write: &mut W);
}
pub trait Coding: Encode + Decode + Clone {
fn get_decoder<T>(map: impl Fn(Self) -> T + 'static) -> impl Fn(&mut dyn Read) -> T {
move |r| map(Self::decode(r))
}
}
impl<T: Encode + Decode + Clone> Coding for T {}
macro_rules! num_impl {
($number:ty) => {
impl Decode for $number {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let mut bytes = [0u8; (<$number>::BITS / 8) as usize];
read.read_exact(&mut bytes).unwrap();
<$number>::from_be_bytes(bytes)
}
}
impl Encode for $number {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write.write_all(&self.to_be_bytes()).expect("Could not write number")
}
}
};
}
num_impl!(u128);
num_impl!(u64);
@@ -56,14 +56,14 @@ num_impl!(i16);
num_impl!(i8);
macro_rules! nonzero_impl {
($name:ty) => {
impl Decode for $name {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { Self::new(Decode::decode(read)).unwrap() }
}
impl Encode for $name {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { self.get().encode(write) }
}
};
}
nonzero_impl!(std::num::NonZeroU8);
@@ -78,111 +78,111 @@ nonzero_impl!(std::num::NonZeroI64);
nonzero_impl!(std::num::NonZeroI128);
impl<T: Encode + ?Sized> Encode for &T {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) }
}
macro_rules! float_impl {
($t:ty, $size:expr) => {
impl Decode for NotNan<$t> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let mut bytes = [0u8; $size];
read.read_exact(&mut bytes).unwrap();
NotNan::new(<$t>::from_be_bytes(bytes)).expect("Float was NaN")
}
}
impl Encode for NotNan<$t> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write.write_all(&self.as_ref().to_be_bytes()).expect("Could not write number")
}
}
};
}
float_impl!(f64, 8);
float_impl!(f32, 4);
impl Decode for String {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let len = u64::decode(read).try_into().unwrap();
let mut data = vec![0u8; len];
read.read_exact(&mut data).unwrap();
std::str::from_utf8(&data).expect("String invalid UTF-8").to_owned()
}
}
impl Encode for String {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write);
write.write_all(self.as_bytes()).unwrap()
}
}
impl Encode for str {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write);
write.write_all(self.as_bytes()).unwrap()
}
}
impl<T: Decode> Decode for Vec<T> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let len = u64::decode(read).try_into().unwrap();
iter::repeat_with(|| T::decode(read)).take(len).collect()
}
}
impl<T: Encode> Encode for Vec<T> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write);
self.iter().for_each(|t| t.encode(write));
}
}
impl<T: Encode> Encode for [T] {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write);
self.iter().for_each(|t| t.encode(write));
}
}
impl<T: Decode> Decode for Option<T> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
match u8::decode(read) {
0 => None,
1 => Some(T::decode(read)),
x => panic!("{x} is not a valid option value"),
}
}
}
impl<T: Encode> Encode for Option<T> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
let t = if let Some(t) = self { t } else { return 0u8.encode(write) };
1u8.encode(write);
t.encode(write);
}
}
impl<T: Decode, E: Decode> Decode for Result<T, E> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
match u8::decode(read) {
0 => Self::Ok(T::decode(read)),
1 => Self::Err(E::decode(read)),
x => panic!("Invalid Result tag {x}"),
}
}
}
impl<T: Encode, E: Encode> Encode for Result<T, E> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
match self {
Ok(t) => encode_enum(write, 0, |w| t.encode(w)),
Err(e) => encode_enum(write, 1, |w| e.encode(w)),
}
}
}
impl<K: Decode + Eq + Hash, V: Decode> Decode for HashMap<K, V> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let len = u64::decode(read).try_into().unwrap();
iter::repeat_with(|| <(K, V)>::decode(read)).take(len).collect()
}
}
impl<K: Encode + Eq + Hash, V: Encode> Encode for HashMap<K, V> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
u64::try_from(self.len()).unwrap().encode(write);
self.iter().for_each(|pair| pair.encode(write));
}
}
macro_rules! tuple {
(($($t:ident)*) ($($T:ident)*)) => {
@@ -216,40 +216,40 @@ tuple!((t u v x y z a b c d e f g h i) (T U V X Y Z A B C D E F G H I));
tuple!((t u v x y z a b c d e f g h i j) (T U V X Y Z A B C D E F G H I J)); // 16
impl Decode for () {
fn decode<R: Read + ?Sized>(_: &mut R) -> Self {}
}
impl Encode for () {
fn encode<W: Write + ?Sized>(&self, _: &mut W) {}
}
impl Decode for Never {
fn decode<R: Read + ?Sized>(_: &mut R) -> Self {
unreachable!("A value of Never cannot exist so it can't have been serialized");
}
}
impl Encode for Never {
fn encode<W: Write + ?Sized>(&self, _: &mut W) { match *self {} }
}
impl Decode for bool {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
let mut buf = [0];
read.read_exact(&mut buf).unwrap();
buf[0] != 0
}
}
impl Encode for bool {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write.write_all(&[if *self { 0xff } else { 0 }]).unwrap()
}
}
impl<T: Decode, const N: usize> Decode for [T; N] {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
// TODO: figure out how to do this in safe rust on the stack
((0..N).map(|_| T::decode(read)).collect::<Vec<_>>().try_into())
.unwrap_or_else(|_| unreachable!("The length of this iterator is statically known"))
}
}
impl<T: Encode, const N: usize> Encode for [T; N] {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { self.iter().for_each(|t| t.encode(write)) }
}
macro_rules! two_end_range {
@@ -271,14 +271,14 @@ two_end_range!(x, Range, .., x.start, x.end);
two_end_range!(x, RangeInclusive, ..=, x.start(), x.end());
macro_rules! smart_ptr {
($name:tt) => {
impl<T: Decode> Decode for $name<T> {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { $name::new(T::decode(read)) }
}
impl<T: Encode> Encode for $name<T> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) }
}
};
}
smart_ptr!(Arc);
@@ -288,15 +288,15 @@ smart_ptr!(Box);
impl<T: ?Sized + ToOwned> Decode for Cow<'_, T>
where T::Owned: Decode
{
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { Cow::Owned(T::Owned::decode(read)) }
}
impl<T: ?Sized + Encode + ToOwned> Encode for Cow<'_, T> {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { (**self).encode(write) }
}
impl Decode for char {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self { char::from_u32(u32::decode(read)).unwrap() }
}
impl Encode for char {
fn encode<W: Write + ?Sized>(&self, write: &mut W) { (*self as u32).encode(write) }
}
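A round-trip sketch using the impls above: Vec<u8> implements Write and &[u8] implements Read, so encoding into memory and decoding back needs no extra plumbing.

use orchid_api_traits::{Decode, Encode};

fn main() {
	let value = vec![Some("hi".to_string()), None];
	let mut buf = Vec::new();
	value.encode(&mut buf); // u64 length prefix, then each element
	let back = Vec::<Option<String>>::decode(&mut &buf[..]);
	assert_eq!(value, back);
}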

View File

@@ -5,32 +5,32 @@ use itertools::{Chunk, Itertools};
use crate::Encode;
pub fn encode_enum<W: Write + ?Sized>(write: &mut W, id: u8, f: impl FnOnce(&mut W)) {
id.encode(write);
f(write)
}
pub fn write_exact<W: Write + ?Sized>(write: &mut W, bytes: &'static [u8]) {
write.write_all(bytes).expect("Failed to write exact bytes")
}
pub fn print_bytes(b: &[u8]) -> String {
(b.iter().map(|b| format!("{b:02x}")))
.chunks(4)
.into_iter()
.map(|mut c: Chunk<_>| c.join(" "))
.join(" ")
}
pub fn read_exact<R: Read + ?Sized>(read: &mut R, bytes: &'static [u8]) {
let mut data = vec![0u8; bytes.len()];
read.read_exact(&mut data).expect("Failed to read bytes");
if data != bytes {
panic!("Wrong bytes!\nExpected: {}\nFound: {}", print_bytes(bytes), print_bytes(&data));
}
}
pub fn enc_vec(enc: &impl Encode) -> Vec<u8> {
let mut vec = Vec::new();
enc.encode(&mut vec);
vec
}
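A usage sketch for the helpers above, written as if inside the same crate (the request module imports crate::helpers::enc_vec the same way):

#[test]
fn helpers_demo() {
	use crate::helpers::{enc_vec, print_bytes};
	// u16 is written big-endian by its Encode impl, so 42 becomes [0x00, 0x2a].
	let bytes = enc_vec(&42u16);
	assert_eq!(print_bytes(&bytes), "00 2a");
}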

View File

@@ -11,54 +11,54 @@ impl TLBool for TLFalse {}
/// A type that implements [Hierarchy]. Used to select implementations of traits
/// on the hierarchy
pub trait InHierarchy: Clone {
/// Indicates that this hierarchy element is a leaf. Leaves can never have
/// children
type IsLeaf: TLBool;
/// Indicates that this hierarchy element is a root. Roots can never have
/// parents
type IsRoot: TLBool;
}
/// A type that derives from a parent type.
pub trait Extends: InHierarchy<IsRoot = TLFalse> + Into<Self::Parent> {
  /// Specify the immediate parent of this type. This guides the blanket
  /// implementations that walk the hierarchy toward its root.
type Parent: InHierarchy<IsLeaf = TLFalse>
+ TryInto<Self>
+ UnderRootImpl<<Self::Parent as InHierarchy>::IsRoot>;
	/// Specify the immediate parent of this type. This guides the blanket
	/// implementations that walk the hierarchy toward its root.
type Parent: InHierarchy<IsLeaf = TLFalse>
+ TryInto<Self>
+ UnderRootImpl<<Self::Parent as InHierarchy>::IsRoot>;
}
pub trait UnderRootImpl<IsRoot: TLBool>: Sized {
type __Root: UnderRoot<IsRoot = TLTrue, Root = Self::__Root>;
fn __into_root(self) -> Self::__Root;
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root>;
type __Root: UnderRoot<IsRoot = TLTrue, Root = Self::__Root>;
fn __into_root(self) -> Self::__Root;
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root>;
}
pub trait UnderRoot: InHierarchy {
type Root: UnderRoot<IsRoot = TLTrue, Root = Self::Root>;
fn into_root(self) -> Self::Root;
fn try_from_root(root: Self::Root) -> Result<Self, Self::Root>;
type Root: UnderRoot<IsRoot = TLTrue, Root = Self::Root>;
fn into_root(self) -> Self::Root;
fn try_from_root(root: Self::Root) -> Result<Self, Self::Root>;
}
impl<T: InHierarchy + UnderRootImpl<T::IsRoot>> UnderRoot for T {
type Root = <Self as UnderRootImpl<<Self as InHierarchy>::IsRoot>>::__Root;
fn into_root(self) -> Self::Root { self.__into_root() }
fn try_from_root(root: Self::Root) -> Result<Self, Self::Root> { Self::__try_from_root(root) }
type Root = <Self as UnderRootImpl<<Self as InHierarchy>::IsRoot>>::__Root;
fn into_root(self) -> Self::Root { self.__into_root() }
fn try_from_root(root: Self::Root) -> Result<Self, Self::Root> { Self::__try_from_root(root) }
}
impl<T: InHierarchy<IsRoot = TLTrue>> UnderRootImpl<TLTrue> for T {
type __Root = Self;
fn __into_root(self) -> Self::__Root { self }
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> { Ok(root) }
type __Root = Self;
fn __into_root(self) -> Self::__Root { self }
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> { Ok(root) }
}
impl<T: InHierarchy<IsRoot = TLFalse> + Extends> UnderRootImpl<TLFalse> for T {
type __Root = <<Self as Extends>::Parent as UnderRootImpl<
<<Self as Extends>::Parent as InHierarchy>::IsRoot,
>>::__Root;
fn __into_root(self) -> Self::__Root {
<Self as Into<<Self as Extends>::Parent>>::into(self).into_root()
}
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> {
let parent = <Self as Extends>::Parent::try_from_root(root)?;
parent.clone().try_into().map_err(|_| parent.into_root())
}
type __Root = <<Self as Extends>::Parent as UnderRootImpl<
<<Self as Extends>::Parent as InHierarchy>::IsRoot,
>>::__Root;
fn __into_root(self) -> Self::__Root {
<Self as Into<<Self as Extends>::Parent>>::into(self).into_root()
}
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> {
let parent = <Self as Extends>::Parent::try_from_root(root)?;
parent.clone().try_into().map_err(|_| parent.into_root())
}
}
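// A hedged sketch of a two-level hierarchy wired through these traits; `Any`
// and `Num` are hypothetical types, not part of this crate, and the real API
// presumably generates the equivalent impls via the `Hierarchy` derive:
#[derive(Clone)]
struct Num(u64);
#[derive(Clone)]
enum Any {
	Num(Num),
}
impl InHierarchy for Any {
	type IsLeaf = TLFalse;
	type IsRoot = TLTrue;
}
impl InHierarchy for Num {
	type IsLeaf = TLTrue;
	type IsRoot = TLFalse;
}
impl From<Num> for Any {
	fn from(n: Num) -> Self { Any::Num(n) }
}
impl TryFrom<Any> for Num {
	type Error = Any;
	fn try_from(a: Any) -> Result<Self, Any> { match a { Any::Num(n) => Ok(n) } }
}
impl Extends for Num {
	type Parent = Any;
}
// With only these pieces, the blanket impls above give both types `UnderRoot`:
// `Num(1).into_root()` yields an `Any`, and `Num::try_from_root(any)` recovers
// the leaf or hands the root back on failure.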

View File

@@ -2,20 +2,20 @@ use super::coding::Coding;
use crate::helpers::enc_vec;
pub trait Request: Coding + Sized + Send + 'static {
type Response: Coding + Send + 'static;
type Response: Coding + Send + 'static;
}
pub fn respond<R: Request>(_: &R, rep: R::Response) -> Vec<u8> { enc_vec(&rep) }
pub fn respond_with<R: Request>(r: &R, f: impl FnOnce(&R) -> R::Response) -> Vec<u8> {
respond(r, f(r))
respond(r, f(r))
}
pub trait Channel: 'static {
type Req: Coding + Sized + Send + 'static;
type Notif: Coding + Sized + Send + 'static;
type Req: Coding + Sized + Send + 'static;
type Notif: Coding + Sized + Send + 'static;
}
pub trait MsgSet: Send + Sync + 'static {
type In: Channel;
type Out: Channel;
type In: Channel;
type Out: Channel;
}
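// A hedged sketch of how these traits tie a request to its reply type; the
// generic handler below is hypothetical, but any concrete `Request` impl can
// be answered the same way:
fn _answer<R: Request>(req: R, compute: impl FnOnce(&R) -> R::Response) -> Vec<u8> {
	// `respond_with` encodes whatever the callback produces, and the `Request`
	// impl guarantees that value has the declared `Response` type.
	respond_with(&req, compute)
}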

View File

@@ -6,6 +6,6 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ordered-float = "4.2.0"
ordered-float = "4.6.0"
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }

View File

@@ -3,7 +3,9 @@ use std::num::NonZeroU64;
use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use crate::{ExprTicket, Expression, ExtHostReq, HostExtNotif, HostExtReq, OrcResult, SysId, TStrv};
use crate::{
ExprTicket, Expression, ExtHostReq, HostExtNotif, HostExtReq, OrcResult, SysId, TStrv,
};
pub type AtomData = Vec<u8>;
@@ -15,34 +17,34 @@ pub struct AtomId(pub NonZeroU64);
/// This has the same semantics as [Atom] except that the owner is implied.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)]
pub struct LocalAtom {
pub drop: Option<AtomId>,
pub data: AtomData,
pub drop: Option<AtomId>,
pub data: AtomData,
}
impl LocalAtom {
pub fn associate(self, owner: SysId) -> Atom { Atom { owner, drop: self.drop, data: self.data } }
pub fn associate(self, owner: SysId) -> Atom { Atom { owner, drop: self.drop, data: self.data } }
}
/// An atom representation that can be serialized and sent around. Atoms
/// represent the smallest increment of work.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)]
pub struct Atom {
/// Instance ID of the system that created the atom
pub owner: SysId,
/// Indicates how the owner should be notified when this atom is dropped.
/// Construction is always explicit and atoms are never cloned.
///
/// Atoms with `drop == None` are also known as trivial, they can be
/// duplicated and stored with no regard to expression lifetimes. NOTICE
/// that this only applies to the atom. If it's referenced with an
/// [ExprTicket], the ticket itself can still expire.
///
/// Notice also that the atoms still expire when the system is dropped, and
/// are not portable across instances of the same system, so this doesn't
/// imply that the atom is serializable.
pub drop: Option<AtomId>,
/// Data stored in the atom. This could be a key into a map, or the raw data
/// of the atom if it isn't too big.
pub data: AtomData,
/// Instance ID of the system that created the atom
pub owner: SysId,
/// Indicates how the owner should be notified when this atom is dropped.
/// Construction is always explicit and atoms are never cloned.
///
/// Atoms with `drop == None` are also known as trivial, they can be
/// duplicated and stored with no regard to expression lifetimes. NOTICE
/// that this only applies to the atom. If it's referenced with an
/// [ExprTicket], the ticket itself can still expire.
///
/// Notice also that the atoms still expire when the system is dropped, and
/// are not portable across instances of the same system, so this doesn't
/// imply that the atom is serializable.
pub drop: Option<AtomId>,
/// Data stored in the atom. This could be a key into a map, or the raw data
/// of the atom if it isn't too big.
pub data: AtomData,
}
/// Attempt to apply an atom as a function to an expression
@@ -50,7 +52,7 @@ pub struct Atom {
#[extends(AtomReq, HostExtReq)]
pub struct CallRef(pub Atom, pub ExprTicket);
impl Request for CallRef {
type Response = Expression;
type Response = Expression;
}
/// Attempt to apply an atom as a function, consuming the atom and enabling the
@@ -60,21 +62,21 @@ impl Request for CallRef {
#[extends(AtomReq, HostExtReq)]
pub struct FinalCall(pub Atom, pub ExprTicket);
impl Request for FinalCall {
type Response = Expression;
type Response = Expression;
}
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)]
pub struct SerializeAtom(pub Atom);
impl Request for SerializeAtom {
type Response = Option<(Vec<u8>, Vec<ExprTicket>)>;
type Response = Option<(Vec<u8>, Vec<ExprTicket>)>;
}
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(HostExtReq)]
pub struct DeserAtom(pub SysId, pub Vec<u8>, pub Vec<ExprTicket>);
impl Request for DeserAtom {
type Response = Atom;
type Response = Atom;
}
/// A request blindly routed to the system that provides an atom.
@@ -82,26 +84,26 @@ impl Request for DeserAtom {
#[extends(AtomReq, HostExtReq)]
pub struct Fwded(pub Atom, pub TStrv, pub Vec<u8>);
impl Request for Fwded {
type Response = Option<Vec<u8>>;
type Response = Option<Vec<u8>>;
}
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExtHostReq)]
pub struct Fwd(pub Atom, pub TStrv, pub Vec<u8>);
impl Request for Fwd {
type Response = Option<Vec<u8>>;
type Response = Option<Vec<u8>>;
}
#[derive(Clone, Debug, Coding)]
pub enum NextStep {
Continue(Expression),
Halt,
Continue(Expression),
Halt,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)]
pub struct Command(pub Atom);
impl Request for Command {
type Response = OrcResult<NextStep>;
type Response = OrcResult<NextStep>;
}
/// Notification that an atom is being dropped because its associated expression
@@ -115,7 +117,7 @@ pub struct AtomDrop(pub SysId, pub AtomId);
#[extends(AtomReq, HostExtReq)]
pub struct AtomPrint(pub Atom);
impl Request for AtomPrint {
type Response = String;
type Response = String;
}
/// Requests that apply to an existing atom instance
@@ -123,24 +125,24 @@ impl Request for AtomPrint {
#[extends(HostExtReq)]
#[extendable]
pub enum AtomReq {
CallRef(CallRef),
FinalCall(FinalCall),
Fwded(Fwded),
Command(Command),
AtomPrint(AtomPrint),
SerializeAtom(SerializeAtom),
CallRef(CallRef),
FinalCall(FinalCall),
Fwded(Fwded),
Command(Command),
AtomPrint(AtomPrint),
SerializeAtom(SerializeAtom),
}
impl AtomReq {
/// Obtain the first [Atom] argument of the request. All requests in this
/// subclass have at least one atom argument.
pub fn get_atom(&self) -> &Atom {
match self {
Self::CallRef(CallRef(a, ..))
| Self::Command(Command(a))
| Self::FinalCall(FinalCall(a, ..))
| Self::Fwded(Fwded(a, ..))
| Self::AtomPrint(AtomPrint(a))
| Self::SerializeAtom(SerializeAtom(a)) => a,
}
}
/// Obtain the first [Atom] argument of the request. All requests in this
/// subclass have at least one atom argument.
pub fn get_atom(&self) -> &Atom {
match self {
Self::CallRef(CallRef(a, ..))
| Self::Command(Command(a))
| Self::FinalCall(FinalCall(a, ..))
| Self::Fwded(Fwded(a, ..))
| Self::AtomPrint(AtomPrint(a))
| Self::SerializeAtom(SerializeAtom(a)) => a,
}
}
}
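// A minimal sketch of building a trivial (drop-free) atom and tagging it with
// its owning system; `sys` stands for a SysId obtained during system startup:
fn _trivial_atom(sys: SysId) -> Atom {
	LocalAtom { drop: None, data: vec![0x2a] }.associate(sys)
}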

View File

@@ -10,11 +10,11 @@ pub struct ErrId(pub NonZeroU16);
#[derive(Clone, Debug, Coding)]
pub struct ErrLocation {
/// Description of the relation of this location to the error. If not used,
/// set to empty string
pub message: Arc<String>,
/// Location in code where the error emerged. This is usually [Location::Gen].
pub location: Location,
/// Description of the relation of this location to the error. If not used,
/// set to empty string
pub message: Arc<String>,
/// Location in code where the error emerged. This is usually [Location::Gen].
pub location: Location,
}
/// Programming errors raised by extensions. At runtime these produce the
@@ -24,14 +24,14 @@ pub struct ErrLocation {
/// and a bottom if the file name isn't a string.
#[derive(Clone, Debug, Coding)]
pub struct OrcError {
/// General description of the kind of error.
pub description: TStr,
/// Specific information about the exact error, preferably containing concrete
/// values.
pub message: Arc<String>,
/// Specific code fragments that have contributed to the emergence of the
/// error.
pub locations: Vec<ErrLocation>,
/// General description of the kind of error.
pub description: TStr,
/// Specific information about the exact error, preferably containing concrete
/// values.
pub message: Arc<String>,
/// Specific code fragments that have contributed to the emergence of the
/// error.
pub locations: Vec<ErrLocation>,
}
/// If this is an [`Err`] then the [`Vec`] must not be empty.

View File

@@ -43,9 +43,9 @@ pub struct Release(pub SysId, pub ExprTicket);
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExprNotif, ExtHostNotif)]
pub struct Move {
pub dec: SysId,
pub inc: SysId,
pub expr: ExprTicket,
pub dec: SysId,
pub inc: SysId,
pub expr: ExprTicket,
}
/// A description of a new expression. It is used as the return value of
@@ -53,48 +53,48 @@ pub struct Move {
/// [crate::tree::Tree].
#[derive(Clone, Debug, Coding)]
pub enum ExpressionKind {
/// Apply the lhs as a function to the rhs
Call(Box<Expression>, Box<Expression>),
/// Lambda function. The number operates as an argument name
Lambda(u64, Box<Expression>),
/// Binds the argument passed to the lambda with the same ID in the same
/// template
Arg(u64),
/// Insert the specified host-expression in the template here. When the clause
/// is used in the const tree, this variant is forbidden.
Slot(ExprTicket),
/// The lhs must be fully processed before the rhs can be processed.
/// Equivalent to Haskell's function of the same name
Seq(Box<Expression>, Box<Expression>),
/// Insert a new atom in the tree. When the clause is used in the const tree,
/// the atom must be trivial. This is always a newly constructed atom, if you
/// want to reference an existing atom, use the corresponding [ExprTicket].
/// Because the atom is newly constructed, it also must belong to this system.
NewAtom(Atom),
/// A reference to a constant
Const(TStrv),
/// A static runtime error.
Bottom(Vec<OrcError>),
/// Apply the lhs as a function to the rhs
Call(Box<Expression>, Box<Expression>),
/// Lambda function. The number operates as an argument name
Lambda(u64, Box<Expression>),
/// Binds the argument passed to the lambda with the same ID in the same
/// template
Arg(u64),
/// Insert the specified host-expression in the template here. When the clause
/// is used in the const tree, this variant is forbidden.
Slot(ExprTicket),
/// The lhs must be fully processed before the rhs can be processed.
/// Equivalent to Haskell's function of the same name
Seq(Box<Expression>, Box<Expression>),
/// Insert a new atom in the tree. When the clause is used in the const tree,
/// the atom must be trivial. This is always a newly constructed atom, if you
/// want to reference an existing atom, use the corresponding [ExprTicket].
/// Because the atom is newly constructed, it also must belong to this system.
NewAtom(Atom),
/// A reference to a constant
Const(TStrv),
/// A static runtime error.
Bottom(Vec<OrcError>),
}
#[derive(Clone, Debug, Coding)]
pub struct Expression {
pub kind: ExpressionKind,
pub location: Location,
pub kind: ExpressionKind,
pub location: Location,
}
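// A brief sketch assembling the identity function `\x. x` as an Expression,
// with locations elided via Location::None (locations are debugging aids only):
fn _identity_expr() -> Expression {
	let body = Expression { kind: ExpressionKind::Arg(0), location: Location::None };
	Expression { kind: ExpressionKind::Lambda(0, Box::new(body)), location: Location::None }
}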
#[derive(Clone, Debug, Coding)]
pub enum InspectedKind {
Atom(Atom),
Bottom(Vec<OrcError>),
Opaque,
Atom(Atom),
Bottom(Vec<OrcError>),
Opaque,
}
#[derive(Clone, Debug, Coding)]
pub struct Inspected {
pub kind: InspectedKind,
pub location: Location,
pub refcount: u32,
pub kind: InspectedKind,
pub location: Location,
pub refcount: u32,
}
/// Obtain information about an expression. Used to act upon arguments by
@@ -103,24 +103,24 @@ pub struct Inspected {
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExprReq, ExtHostReq)]
pub struct Inspect {
pub target: ExprTicket,
pub target: ExprTicket,
}
impl Request for Inspect {
type Response = Inspected;
type Response = Inspected;
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExtHostReq)]
#[extendable]
pub enum ExprReq {
Inspect(Inspect),
Inspect(Inspect),
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExtHostNotif)]
#[extendable]
pub enum ExprNotif {
Acquire(Acquire),
Release(Release),
Move(Move),
Acquire(Acquire),
Release(Release),
Move(Move),
}

View File

@@ -12,10 +12,10 @@ use crate::{ExtHostReq, HostExtReq};
#[extends(ExtHostReq)]
#[extendable]
pub enum IntReq {
InternStr(InternStr),
InternStrv(InternStrv),
ExternStr(ExternStr),
ExternStrv(ExternStrv),
InternStr(InternStr),
InternStrv(InternStrv),
ExternStr(ExternStr),
ExternStrv(ExternStrv),
}
/// replica -> master to intern a string on the master. Repeatable.
@@ -25,7 +25,7 @@ pub enum IntReq {
#[extends(IntReq, ExtHostReq)]
pub struct InternStr(pub Arc<String>);
impl Request for InternStr {
type Response = TStr;
type Response = TStr;
}
/// replica -> master to find the interned string corresponding to a key.
@@ -37,7 +37,7 @@ impl Request for InternStr {
#[extends(IntReq, ExtHostReq)]
pub struct ExternStr(pub TStr);
impl Request for ExternStr {
type Response = Arc<String>;
type Response = Arc<String>;
}
/// replica -> master to intern a vector of interned strings
///
@@ -48,7 +48,7 @@ impl Request for ExternStr {
#[extends(IntReq, ExtHostReq)]
pub struct InternStrv(pub Arc<Vec<TStr>>);
impl Request for InternStrv {
type Response = TStrv;
type Response = TStrv;
}
/// replica -> master to find the vector of interned strings corresponding to a
/// token
@@ -60,7 +60,7 @@ impl Request for InternStrv {
#[extends(IntReq, ExtHostReq)]
pub struct ExternStrv(pub TStrv);
impl Request for ExternStrv {
type Response = Arc<Vec<TStr>>;
type Response = Arc<Vec<TStr>>;
}
/// A substitute for an interned string in serialized datastructures.
@@ -77,13 +77,13 @@ pub struct TStrv(pub NonZeroU64);
#[extends(HostExtReq)]
pub struct Sweep;
impl Request for Sweep {
type Response = Retained;
type Response = Retained;
}
/// List of keys in this replica that couldn't be swept because local
/// datastructures reference their value.
#[derive(Clone, Debug, Coding)]
pub struct Retained {
pub strings: Vec<TStr>,
pub vecs: Vec<TStrv>,
pub strings: Vec<TStr>,
pub vecs: Vec<TStrv>,
}

View File

@@ -14,33 +14,33 @@ pub struct CharFilter(pub Vec<RangeInclusive<char>>);
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
pub struct LexExpr {
pub sys: SysId,
pub id: ParsId,
pub text: TStr,
pub pos: u32,
pub sys: SysId,
pub id: ParsId,
pub text: TStr,
pub pos: u32,
}
impl Request for LexExpr {
type Response = Option<OrcResult<LexedExpr>>;
type Response = Option<OrcResult<LexedExpr>>;
}
#[derive(Clone, Debug, Coding)]
pub struct LexedExpr {
pub pos: u32,
pub expr: TokenTree,
pub pos: u32,
pub expr: TokenTree,
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)]
pub struct SubLex {
pub id: ParsId,
pub pos: u32,
pub id: ParsId,
pub pos: u32,
}
impl Request for SubLex {
type Response = Option<SubLexed>;
type Response = Option<SubLexed>;
}
#[derive(Clone, Debug, Coding)]
pub struct SubLexed {
pub pos: u32,
pub ticket: TreeTicket,
pub pos: u32,
pub ticket: TreeTicket,
}

View File

@@ -6,29 +6,29 @@ use crate::{TStr, TStrv};
#[derive(Clone, Debug, Coding)]
pub enum Location {
/// Location inaccessible. Locations are always debugging aids and never
/// mandatory.
None,
/// Associated with a slot when wrapped in an expression.
SlotTarget,
/// Used in functions to denote the generated code that carries on the
/// location of the call.
Inherit,
Gen(CodeGenInfo),
/// Range and file
SourceRange(SourceRange),
/// Range only, file implied. Most notably used by parsers
Range(Range<u32>),
/// Location inaccessible. Locations are always debugging aids and never
/// mandatory.
None,
/// Associated with a slot when wrapped in an expression.
SlotTarget,
/// Used in functions to denote the generated code that carries on the
/// location of the call.
Inherit,
Gen(CodeGenInfo),
/// Range and file
SourceRange(SourceRange),
/// Range only, file implied. Most notably used by parsers
Range(Range<u32>),
}
#[derive(Clone, Debug, Coding)]
pub struct SourceRange {
pub path: TStrv,
pub range: Range<u32>,
pub path: TStrv,
pub range: Range<u32>,
}
#[derive(Clone, Debug, Coding)]
pub struct CodeGenInfo {
pub generator: TStrv,
pub details: TStr,
pub generator: TStrv,
pub details: TStr,
}
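// A short sketch of attaching a concrete source range to a file path token;
// `path` stands for a TStrv previously obtained from the interner:
fn _range_loc(path: TStrv) -> Location {
	Location::SourceRange(SourceRange { path, range: 4..27 })
}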

View File

@@ -4,8 +4,8 @@ use crate::ExtHostNotif;
#[derive(Clone, Debug, Coding)]
pub enum LogStrategy {
StdErr,
File(String),
StdErr,
File(String),
}
#[derive(Clone, Debug, Coding, Hierarchy)]

View File

@@ -5,79 +5,82 @@ use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use ordered_float::NotNan;
use crate::{Atom, Comment, ExtHostReq, HostExtReq, Location, OrcResult, Paren, ParsId, SysId, TStr, TStrv};
use crate::{
Atom, Comment, ExtHostReq, HostExtReq, Location, OrcResult, Paren, ParsId, SysId, TStr, TStrv,
};
#[derive(Clone, Copy, Debug, Coding, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroTreeId(pub NonZeroU64);
#[derive(Clone, Debug, Coding)]
pub struct MacroTree {
pub location: Location,
pub token: MacroToken,
pub location: Location,
pub token: MacroToken,
}
#[derive(Clone, Debug, Coding)]
pub enum MacroToken {
S(Paren, Vec<MacroTree>),
Name(TStrv),
Slot(MacroTreeId),
Lambda(Vec<MacroTree>, Vec<MacroTree>),
Ph(Placeholder),
Atom(Atom),
S(Paren, Vec<MacroTree>),
Name(TStrv),
Slot(MacroTreeId),
Lambda(Vec<MacroTree>, Vec<MacroTree>),
Ph(Placeholder),
Atom(Atom),
}
#[derive(Clone, Debug, Coding)]
pub struct MacroBlock {
pub priority: Option<NotNan<f64>>,
pub rules: Vec<MacroRule>,
pub priority: Option<NotNan<f64>>,
pub rules: Vec<MacroRule>,
}
#[derive(Clone, Debug, Coding)]
pub struct MacroRule {
pub location: Location,
pub comments: Vec<Comment>,
pub pattern: Vec<MacroTree>,
pub id: MacroId,
pub location: Location,
pub comments: Vec<Comment>,
pub pattern: Vec<MacroTree>,
pub id: MacroId,
}
/// A specific macro rule with a specific pattern across invocations
#[derive(Clone, Copy, Debug, Coding, PartialEq, Eq, Hash)]
pub struct MacroId(pub NonZeroU64);
/// After a pattern matches, this call executes the body of the macro. This request returns None
/// if an inner nested request raised an exception
/// After a pattern matches, this call executes the body of the macro. This
/// request returns None if an inner nested request raised an exception
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
pub struct ApplyMacro {
pub sys: SysId,
pub id: MacroId,
/// Recursion token
pub run_id: ParsId,
/// Must contain exactly the keys that were specified as placeholders in the pattern
pub params: HashMap<TStr, Vec<MacroTree>>,
pub sys: SysId,
pub id: MacroId,
/// Recursion token
pub run_id: ParsId,
/// Must contain exactly the keys that were specified as placeholders in the
/// pattern
pub params: HashMap<TStr, Vec<MacroTree>>,
}
impl Request for ApplyMacro {
type Response = Option<OrcResult<Vec<MacroTree>>>;
type Response = Option<OrcResult<Vec<MacroTree>>>;
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)]
pub struct RunMacros {
pub run_id: ParsId,
pub query: Vec<MacroTree>,
pub run_id: ParsId,
pub query: Vec<MacroTree>,
}
impl Request for RunMacros {
type Response = Option<Vec<MacroTree>>;
type Response = Option<Vec<MacroTree>>;
}
#[derive(Clone, Debug, Coding)]
pub struct Placeholder {
pub name: TStr,
pub kind: PhKind,
pub name: TStr,
pub kind: PhKind,
}
#[derive(Clone, Copy, Debug, Coding)]
pub enum PhKind {
Scalar,
Vector { priority: u8, at_least_one: bool },
Scalar,
Vector { priority: u8, at_least_one: bool },
}
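// A hedged sketch of a vectorial placeholder wrapped into a MacroTree; `name`
// stands for the interned placeholder name:
fn _vec_placeholder(name: TStr) -> MacroTree {
	MacroTree {
		location: Location::Inherit,
		token: MacroToken::Ph(Placeholder {
			name,
			kind: PhKind::Vector { priority: 0, at_least_one: false },
		}),
	}
}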

View File

@@ -11,11 +11,11 @@ pub struct ParsId(pub NonZeroU64);
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
pub struct ParseLine {
pub sys: SysId,
pub comments: Vec<Comment>,
pub exported: bool,
pub line: Vec<TokenTree>,
pub sys: SysId,
pub comments: Vec<Comment>,
pub exported: bool,
pub line: Vec<TokenTree>,
}
impl Request for ParseLine {
type Response = OrcResult<Vec<TokenTree>>;
type Response = OrcResult<Vec<TokenTree>>;
}

View File

@@ -25,63 +25,63 @@
use std::io::{Read, Write};
use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::{read_exact, write_exact, Channel, Decode, Encode, MsgSet, Request};
use orchid_api_traits::{Channel, Decode, Encode, MsgSet, Request, read_exact, write_exact};
use crate::{atom, expr, interner, lexer, logging, macros, parser, system, tree, vfs};
static HOST_INTRO: &[u8] = b"Orchid host, binary API v0\n";
pub struct HostHeader {
pub log_strategy: logging::LogStrategy,
pub log_strategy: logging::LogStrategy,
}
impl Decode for HostHeader {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
read_exact(read, HOST_INTRO);
Self { log_strategy: logging::LogStrategy::decode(read) }
}
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
read_exact(read, HOST_INTRO);
Self { log_strategy: logging::LogStrategy::decode(read) }
}
}
impl Encode for HostHeader {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write_exact(write, HOST_INTRO);
self.log_strategy.encode(write)
}
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write_exact(write, HOST_INTRO);
self.log_strategy.encode(write)
}
}
static EXT_INTRO: &[u8] = b"Orchid extension, binary API v0\n";
pub struct ExtensionHeader {
pub name: String,
pub systems: Vec<system::SystemDecl>,
pub name: String,
pub systems: Vec<system::SystemDecl>,
}
impl Decode for ExtensionHeader {
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
read_exact(read, EXT_INTRO);
Self { name: String::decode(read), systems: Vec::decode(read) }
}
fn decode<R: Read + ?Sized>(read: &mut R) -> Self {
read_exact(read, EXT_INTRO);
Self { name: String::decode(read), systems: Vec::decode(read) }
}
}
impl Encode for ExtensionHeader {
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write_exact(write, EXT_INTRO);
self.name.encode(write);
self.systems.encode(write)
}
fn encode<W: Write + ?Sized>(&self, write: &mut W) {
write_exact(write, EXT_INTRO);
self.name.encode(write);
self.systems.encode(write)
}
}
#[derive(Clone, Debug, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct Ping;
impl Request for Ping {
type Response = ();
type Response = ();
}
/// Requests running from the extension to the host
#[derive(Clone, Coding, Hierarchy)]
#[extendable]
pub enum ExtHostReq {
Ping(Ping),
IntReq(interner::IntReq),
Fwd(atom::Fwd),
SysFwd(system::SysFwd),
ExprReq(expr::ExprReq),
SubLex(lexer::SubLex),
RunMacros(macros::RunMacros),
Ping(Ping),
IntReq(interner::IntReq),
Fwd(atom::Fwd),
SysFwd(system::SysFwd),
ExprReq(expr::ExprReq),
SubLex(lexer::SubLex),
RunMacros(macros::RunMacros),
}
/// Notifications sent from the extension to the host
@@ -89,93 +89,93 @@ pub enum ExtHostReq {
#[derive(Debug, Clone, Coding, Hierarchy)]
#[extendable]
pub enum ExtHostNotif {
ExprNotif(expr::ExprNotif),
Log(logging::Log),
ExprNotif(expr::ExprNotif),
Log(logging::Log),
}
pub struct ExtHostChannel;
impl Channel for ExtHostChannel {
type Notif = ExtHostNotif;
type Req = ExtHostReq;
type Notif = ExtHostNotif;
type Req = ExtHostReq;
}
/// Requests running from the host to the extension
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable]
pub enum HostExtReq {
Ping(Ping),
SysReq(system::SysReq),
Sweep(interner::Sweep),
AtomReq(atom::AtomReq),
DeserAtom(atom::DeserAtom),
LexExpr(lexer::LexExpr),
ParseLine(parser::ParseLine),
GetMember(tree::GetMember),
VfsReq(vfs::VfsReq),
ApplyMacro(macros::ApplyMacro),
Ping(Ping),
SysReq(system::SysReq),
Sweep(interner::Sweep),
AtomReq(atom::AtomReq),
DeserAtom(atom::DeserAtom),
LexExpr(lexer::LexExpr),
ParseLine(parser::ParseLine),
GetMember(tree::GetMember),
VfsReq(vfs::VfsReq),
ApplyMacro(macros::ApplyMacro),
}
/// Notifications sent from the host to the extension
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable]
pub enum HostExtNotif {
SystemDrop(system::SystemDrop),
AtomDrop(atom::AtomDrop),
/// The host can assume that after this notif is sent, a correctly written
/// extension will eventually exit.
Exit,
SystemDrop(system::SystemDrop),
AtomDrop(atom::AtomDrop),
/// The host can assume that after this notif is sent, a correctly written
/// extension will eventually exit.
Exit,
}
pub struct HostExtChannel;
impl Channel for HostExtChannel {
type Notif = HostExtNotif;
type Req = HostExtReq;
type Notif = HostExtNotif;
type Req = HostExtReq;
}
/// Message set viewed from the extension's perspective
pub struct ExtMsgSet;
impl MsgSet for ExtMsgSet {
type In = HostExtChannel;
type Out = ExtHostChannel;
type In = HostExtChannel;
type Out = ExtHostChannel;
}
/// Message Set viewed from the host's perspective
pub struct HostMsgSet;
impl MsgSet for HostMsgSet {
type In = ExtHostChannel;
type Out = HostExtChannel;
type In = ExtHostChannel;
type Out = HostExtChannel;
}
#[cfg(test)]
mod tests {
use orchid_api_traits::enc_vec;
use ordered_float::NotNan;
use orchid_api_traits::enc_vec;
use ordered_float::NotNan;
use super::*;
use super::*;
#[test]
fn host_header_enc() {
let hh = HostHeader { log_strategy: logging::LogStrategy::File("SomeFile".to_string()) };
let mut enc = &enc_vec(&hh)[..];
eprintln!("Encoded to {enc:?}");
HostHeader::decode(&mut enc);
assert_eq!(enc, []);
}
#[test]
fn host_header_enc() {
let hh = HostHeader { log_strategy: logging::LogStrategy::File("SomeFile".to_string()) };
let mut enc = &enc_vec(&hh)[..];
eprintln!("Encoded to {enc:?}");
HostHeader::decode(&mut enc);
assert_eq!(enc, []);
}
#[test]
fn ext_header_enc() {
let eh = ExtensionHeader {
name: "my_extension".to_string(),
systems: vec![system::SystemDecl {
id: system::SysDeclId(1.try_into().unwrap()),
name: "misc".to_string(),
depends: vec!["std".to_string()],
priority: NotNan::new(1f64).unwrap(),
}],
};
let mut enc = &enc_vec(&eh)[..];
eprintln!("Encoded to {enc:?}");
ExtensionHeader::decode(&mut enc);
assert_eq!(enc, [])
}
#[test]
fn ext_header_enc() {
let eh = ExtensionHeader {
name: "my_extension".to_string(),
systems: vec![system::SystemDecl {
id: system::SysDeclId(1.try_into().unwrap()),
name: "misc".to_string(),
depends: vec!["std".to_string()],
priority: NotNan::new(1f64).unwrap(),
}],
};
let mut enc = &enc_vec(&eh)[..];
eprintln!("Encoded to {enc:?}");
ExtensionHeader::decode(&mut enc);
assert_eq!(enc, [])
}
}

View File

@@ -19,21 +19,21 @@ pub struct SysId(pub NonZeroU16);
/// extension header, so it cannot rely on the interner.
#[derive(Debug, Clone, Coding)]
pub struct SystemDecl {
/// ID of the system, unique within the library
pub id: SysDeclId,
/// This can be depended upon. Exactly one of each kind will be loaded
pub name: String,
/// If multiple instances of a system are found, the highest priority will be
/// used. This can be used for version counting, but also for fallbacks if a
/// negative number is found.
///
/// Systems cannot depend on specific versions and older versions of systems
/// are never loaded. Compatibility can be determined on a per-system basis
/// through an algorithm chosen by the provider.
pub priority: NotNan<f64>,
/// List of systems needed for this one to work correctly. These will be
/// looked up, and an error produced if they aren't found.
pub depends: Vec<String>,
/// ID of the system, unique within the library
pub id: SysDeclId,
/// This can be depended upon. Exactly one of each kind will be loaded
pub name: String,
/// If multiple instances of a system are found, the highest priority will be
/// used. This can be used for version counting, but also for fallbacks if a
/// negative number is found.
///
/// Systems cannot depend on specific versions and older versions of systems
/// are never loaded. Compatibility can be determined on a per-system basis
/// through an algorithm chosen by the provider.
pub priority: NotNan<f64>,
/// List of systems needed for this one to work correctly. These will be
/// looked up, and an error produced if they aren't found.
pub depends: Vec<String>,
}
/// Host -> extension; instantiate a system according to its [SystemDecl].
@@ -43,26 +43,26 @@ pub struct SystemDecl {
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(SysReq, HostExtReq)]
pub struct NewSystem {
/// ID of the system
pub system: SysDeclId,
/// ID of the system instance, unique for the host
pub id: SysId,
/// Instance IDs for dependencies, in the order that the names appear in the
/// declaration
pub depends: Vec<SysId>,
/// ID of the system
pub system: SysDeclId,
/// ID of the system instance, unique for the host
pub id: SysId,
/// Instance IDs for dependencies, in the order that the names appear in the
/// declaration
pub depends: Vec<SysId>,
}
impl Request for NewSystem {
type Response = SystemInst;
type Response = SystemInst;
}
#[derive(Clone, Debug, Coding)]
pub struct SystemInst {
/// The set of possible starting characters of tokens the lexer of this system
/// can process. The lexer will notify this system if it encounters one of
  /// these characters.
pub lex_filter: CharFilter,
pub line_types: Vec<TStr>,
pub const_root: HashMap<TStr, MemberKind>,
/// The set of possible starting characters of tokens the lexer of this system
/// can process. The lexer will notify this system if it encounters one of
	/// these characters.
pub lex_filter: CharFilter,
pub line_types: Vec<TStr>,
pub const_root: HashMap<TStr, MemberKind>,
}
#[derive(Clone, Debug, Coding, Hierarchy)]
@@ -73,20 +73,20 @@ pub struct SystemDrop(pub SysId);
#[extends(SysReq, HostExtReq)]
pub struct SysFwded(pub SysId, pub Vec<u8>);
impl Request for SysFwded {
type Response = Vec<u8>;
type Response = Vec<u8>;
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)]
pub struct SysFwd(pub SysId, pub Vec<u8>);
impl Request for SysFwd {
type Response = Vec<u8>;
type Response = Vec<u8>;
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
#[extendable]
pub enum SysReq {
NewSystem(NewSystem),
SysFwded(SysFwded),
NewSystem(NewSystem),
SysFwded(SysFwded),
}

View File

@@ -7,7 +7,7 @@ use orchid_api_traits::Request;
use ordered_float::NotNan;
use crate::{
Atom, Expression, HostExtReq, Location, MacroBlock, OrcError, Placeholder, SysId, TStr, TStrv,
Atom, Expression, HostExtReq, Location, MacroBlock, OrcError, Placeholder, SysId, TStr, TStrv,
};
/// A token tree from a lexer recursion request. Its lifetime is the lex call,
@@ -22,42 +22,42 @@ pub struct TreeTicket(pub NonZeroU64);
#[derive(Clone, Debug, Coding)]
pub struct TokenTree {
pub token: Token,
pub range: Range<u32>,
pub token: Token,
pub range: Range<u32>,
}
#[derive(Clone, Debug, Coding)]
pub enum Token {
/// Lambda function head, from the opening \ until the beginning of the body.
LambdaHead(Vec<TokenTree>),
/// A name segment or an operator.
Name(TStr),
/// ::
NS,
/// Line break.
BR,
/// ( Round parens ), [ Square brackets ] or { Curly braces }
S(Paren, Vec<TokenTree>),
/// A new atom
Atom(Atom),
/// Anchor to insert a subtree
Slot(TreeTicket),
/// A static compile-time error returned by failing lexers if
/// the rest of the source is likely still meaningful
Bottom(Vec<OrcError>),
/// A comment
Comment(Arc<String>),
/// Placeholder
Ph(Placeholder),
/// Macro block head
Macro(Option<NotNan<f64>>),
/// Lambda function head, from the opening \ until the beginning of the body.
LambdaHead(Vec<TokenTree>),
/// A name segment or an operator.
Name(TStr),
/// ::
NS,
/// Line break.
BR,
/// ( Round parens ), [ Square brackets ] or { Curly braces }
S(Paren, Vec<TokenTree>),
/// A new atom
Atom(Atom),
/// Anchor to insert a subtree
Slot(TreeTicket),
/// A static compile-time error returned by failing lexers if
/// the rest of the source is likely still meaningful
Bottom(Vec<OrcError>),
/// A comment
Comment(Arc<String>),
/// Placeholder
Ph(Placeholder),
/// Macro block head
Macro(Option<NotNan<f64>>),
}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Coding)]
pub enum Paren {
Round,
Square,
Curly,
Round,
Square,
Curly,
}
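// A small sketch of the token tree a lexer might emit for a parenthesized
// name; `a` stands for an interned name token and the ranges index the source:
fn _parenthesized(a: TStr) -> TokenTree {
	let inner = TokenTree { token: Token::Name(a), range: 1..2 };
	TokenTree { token: Token::S(Paren::Round, vec![inner]), range: 0..3 }
}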
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
@@ -65,46 +65,46 @@ pub struct TreeId(pub NonZeroU64);
#[derive(Clone, Debug, Coding)]
pub struct Item {
pub location: Location,
pub comments: Vec<Comment>,
pub kind: ItemKind,
pub location: Location,
pub comments: Vec<Comment>,
pub kind: ItemKind,
}
#[derive(Clone, Debug, Coding)]
pub enum ItemKind {
Member(Member),
Macro(MacroBlock),
Export(TStr),
Import(TStrv),
Member(Member),
Macro(MacroBlock),
Export(TStr),
Import(TStrv),
}
#[derive(Clone, Debug, Coding)]
pub struct Comment {
pub text: TStr,
pub location: Location,
pub text: TStr,
pub location: Location,
}
#[derive(Clone, Debug, Coding)]
pub struct Member {
pub name: TStr,
pub kind: MemberKind,
pub name: TStr,
pub kind: MemberKind,
}
#[derive(Clone, Debug, Coding)]
pub enum MemberKind {
Const(Expression),
Module(Module),
Lazy(TreeId),
Const(Expression),
Module(Module),
Lazy(TreeId),
}
#[derive(Clone, Debug, Coding)]
pub struct Module {
pub items: Vec<Item>,
pub items: Vec<Item>,
}
#[derive(Clone, Copy, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
pub struct GetMember(pub SysId, pub TreeId);
impl Request for GetMember {
type Response = MemberKind;
type Response = MemberKind;
}

View File

@@ -14,34 +14,34 @@ pub struct VfsId(pub NonZeroU16);
#[derive(Clone, Debug, Coding)]
pub enum Loaded {
Code(String),
Collection(Vec<TStr>),
Code(String),
Collection(Vec<TStr>),
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(VfsReq, HostExtReq)]
pub struct VfsRead(pub SysId, pub VfsId, pub Vec<TStr>);
impl Request for VfsRead {
type Response = OrcResult<Loaded>;
type Response = OrcResult<Loaded>;
}
#[derive(Clone, Debug, Coding)]
pub enum EagerVfs {
Lazy(VfsId),
Eager(HashMap<TStr, EagerVfs>),
Lazy(VfsId),
Eager(HashMap<TStr, EagerVfs>),
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(VfsReq, HostExtReq)]
pub struct GetVfs(pub SysId);
impl Request for GetVfs {
type Response = EagerVfs;
type Response = EagerVfs;
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
#[extendable]
pub enum VfsReq {
GetVfs(GetVfs),
VfsRead(VfsRead),
GetVfs(GetVfs),
VfsRead(VfsRead),
}
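// A hedged sketch of an eager VFS layout: one lazily resolved subtree mounted
// under a single directory key; `src_dir` and `lazy_id` are placeholders:
fn _vfs_layout(src_dir: TStr, lazy_id: VfsId) -> EagerVfs {
	let mut root = HashMap::new();
	root.insert(src_dir, EagerVfs::Lazy(lazy_id));
	EagerVfs::Eager(root)
}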

View File

@@ -10,14 +10,14 @@ derive_destructure = "1.0.0"
dyn-clone = "1.0.17"
hashbrown = "0.15.2"
itertools = "0.14.0"
lazy_static = "1.4.0"
lazy_static = "1.5.0"
never = "0.1.0"
num-traits = "0.2.19"
orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
ordered-float = "4.2.0"
rust-embed = "8.3.0"
rust_decimal = "1.35.0"
substack = "1.1.0"
ordered-float = "4.6.0"
rust-embed = "8.5.0"
rust_decimal = "1.36.0"
substack = "1.1.1"
trait-set = "0.3.0"

View File

@@ -3,27 +3,27 @@ use std::ops::Deref;
use std::sync::Arc;
pub enum ArcCow<'a, T: ?Sized + ToOwned> {
Borrowed(&'a T),
Owned(Arc<T::Owned>),
Borrowed(&'a T),
Owned(Arc<T::Owned>),
}
impl<T: ?Sized + ToOwned> ArcCow<'_, T> {
pub fn owned(value: T::Owned) -> Self { Self::Owned(Arc::new(value)) }
pub fn owned(value: T::Owned) -> Self { Self::Owned(Arc::new(value)) }
}
impl<T: ?Sized + ToOwned> Clone for ArcCow<'_, T> {
fn clone(&self) -> Self {
match self {
Self::Borrowed(r) => Self::Borrowed(r),
Self::Owned(b) => Self::Owned(b.clone()),
}
}
fn clone(&self) -> Self {
match self {
Self::Borrowed(r) => Self::Borrowed(r),
Self::Owned(b) => Self::Owned(b.clone()),
}
}
}
impl<T: ?Sized + ToOwned> Deref for ArcCow<'_, T> {
type Target = T;
fn deref(&self) -> &Self::Target {
match self {
Self::Borrowed(t) => t,
Self::Owned(b) => b.as_ref().borrow(),
}
}
type Target = T;
fn deref(&self) -> &Self::Target {
match self {
Self::Borrowed(t) => t,
Self::Owned(b) => b.as_ref().borrow(),
}
}
}
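// A quick sketch of the two ArcCow states over `str`; both deref to &str:
fn _arccow_example() {
	let borrowed: ArcCow<'_, str> = ArcCow::Borrowed("static text");
	let owned: ArcCow<'static, str> = ArcCow::owned("built at runtime".to_string());
	assert_eq!(&*borrowed, "static text");
	assert_eq!(owned.len(), "built at runtime".len());
}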

View File

@@ -8,54 +8,54 @@ use crate::api;
pub type CRange = RangeInclusive<char>;
pub trait ICFilter: fmt::Debug {
fn ranges(&self) -> &[RangeInclusive<char>];
fn ranges(&self) -> &[RangeInclusive<char>];
}
impl ICFilter for [RangeInclusive<char>] {
fn ranges(&self) -> &[RangeInclusive<char>] { self }
fn ranges(&self) -> &[RangeInclusive<char>] { self }
}
impl ICFilter for api::CharFilter {
fn ranges(&self) -> &[RangeInclusive<char>] { &self.0 }
fn ranges(&self) -> &[RangeInclusive<char>] { &self.0 }
}
fn try_merge_char_ranges(left: CRange, right: CRange) -> Result<CRange, (CRange, CRange)> {
match *left.end() as u32 + 1 < *right.start() as u32 {
true => Err((left, right)),
false => Ok(*left.start()..=*right.end()),
}
match *left.end() as u32 + 1 < *right.start() as u32 {
true => Err((left, right)),
false => Ok(*left.start()..=*right.end()),
}
}
/// Process the character ranges to make them adhere to the structural
/// requirements of [CharFilter]
pub fn mk_char_filter(items: impl IntoIterator<Item = CRange>) -> api::CharFilter {
api::CharFilter(
(items.into_iter())
.filter(|r| *r.start() as u32 <= *r.end() as u32)
.sorted_by_key(|r| *r.start() as u32)
.coalesce(try_merge_char_ranges)
.collect_vec(),
)
api::CharFilter(
(items.into_iter())
.filter(|r| *r.start() as u32 <= *r.end() as u32)
.sorted_by_key(|r| *r.start() as u32)
.coalesce(try_merge_char_ranges)
.collect_vec(),
)
}
/// Decide whether a char filter matches a character via binary search
pub fn char_filter_match(cf: &(impl ICFilter + ?Sized), c: char) -> bool {
match cf.ranges().binary_search_by_key(&c, |l| *l.end()) {
Ok(_) => true, // c is the end of a range
Err(i) if i == cf.ranges().len() => false, // all ranges end before c
Err(i) => cf.ranges()[i].contains(&c), /* c between cf.0[i-1]?.end and cf.0[i].end,
* check [i] */
}
match cf.ranges().binary_search_by_key(&c, |l| *l.end()) {
Ok(_) => true, // c is the end of a range
Err(i) if i == cf.ranges().len() => false, // all ranges end before c
Err(i) => cf.ranges()[i].contains(&c), /* c between cf.0[i-1]?.end and cf.0[i].end,
* check [i] */
}
}
/// Merge two char filters into a filter that matches if either of the
/// constituents would match.
pub fn char_filter_union(
l: &(impl ICFilter + ?Sized),
r: &(impl ICFilter + ?Sized),
l: &(impl ICFilter + ?Sized),
r: &(impl ICFilter + ?Sized),
) -> api::CharFilter {
api::CharFilter(
(l.ranges().iter().merge_by(r.ranges(), |l, r| l.start() <= r.start()))
.cloned()
.coalesce(try_merge_char_ranges)
.collect_vec(),
)
api::CharFilter(
(l.ranges().iter().merge_by(r.ranges(), |l, r| l.start() <= r.start()))
.cloned()
.coalesce(try_merge_char_ranges)
.collect_vec(),
)
}
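// A brief sketch: build a filter for ASCII digits and the two sign characters,
// then query the union with the binary-search matcher above:
fn _char_filter_example() {
	let digits = mk_char_filter(['0'..='9']);
	let signs = mk_char_filter(['+'..='+', '-'..='-']);
	let both = char_filter_union(&digits, &signs);
	assert!(char_filter_match(&both, '7'));
	assert!(!char_filter_match(&both, 'x'));
}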

View File

@@ -6,19 +6,19 @@ use never::Never;
/// variety of types for different purposes. Very broadly, if the operation
/// succeeds, the result should represent _both_ inputs.
pub trait Combine: Sized {
/// Information about the failure
type Error;
/// Information about the failure
type Error;
/// Merge two values into a value that represents both, if this is possible.
fn combine(self, other: Self) -> Result<Self, Self::Error>;
/// Merge two values into a value that represents both, if this is possible.
fn combine(self, other: Self) -> Result<Self, Self::Error>;
}
impl Combine for Never {
type Error = Never;
fn combine(self, _: Self) -> Result<Self, Self::Error> { match self {} }
type Error = Never;
fn combine(self, _: Self) -> Result<Self, Self::Error> { match self {} }
}
impl Combine for () {
type Error = Never;
fn combine(self, (): Self) -> Result<Self, Self::Error> { Ok(()) }
type Error = Never;
fn combine(self, (): Self) -> Result<Self, Self::Error> { Ok(()) }
}
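// A hedged sketch of Combine for a type where merging can genuinely fail:
// two settings only merge if they agree (hypothetical types, not in this crate):
#[derive(Clone, Debug, PartialEq)]
struct Setting(u32);
struct Conflict(u32, u32);
impl Combine for Setting {
	type Error = Conflict;
	fn combine(self, other: Self) -> Result<Self, Self::Error> {
		if self == other { Ok(self) } else { Err(Conflict(self.0, other.0)) }
	}
}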

View File

@@ -12,158 +12,158 @@ use crate::location::Pos;
/// processing got stuck, a command that is likely to be incorrect
#[derive(Clone, Debug)]
pub struct ErrPos {
/// The suspected origin
pub position: Pos,
/// Any information about the role of this origin
pub message: Option<Arc<String>>,
/// The suspected origin
pub position: Pos,
/// Any information about the role of this origin
pub message: Option<Arc<String>>,
}
impl ErrPos {
pub fn new(msg: &str, position: Pos) -> Self {
Self { message: Some(Arc::new(msg.to_string())), position }
}
fn from_api(api: &api::ErrLocation) -> Self {
Self {
message: Some(api.message.clone()).filter(|s| !s.is_empty()),
position: Pos::from_api(&api.location),
}
}
fn to_api(&self) -> api::ErrLocation {
api::ErrLocation {
message: self.message.clone().unwrap_or_default(),
location: self.position.to_api(),
}
}
pub fn new(msg: &str, position: Pos) -> Self {
Self { message: Some(Arc::new(msg.to_string())), position }
}
fn from_api(api: &api::ErrLocation) -> Self {
Self {
message: Some(api.message.clone()).filter(|s| !s.is_empty()),
position: Pos::from_api(&api.location),
}
}
fn to_api(&self) -> api::ErrLocation {
api::ErrLocation {
message: self.message.clone().unwrap_or_default(),
location: self.position.to_api(),
}
}
}
impl From<Pos> for ErrPos {
fn from(origin: Pos) -> Self { Self { position: origin, message: None } }
fn from(origin: Pos) -> Self { Self { position: origin, message: None } }
}
#[derive(Clone, Debug)]
pub struct OrcErr {
pub description: Tok<String>,
pub message: Arc<String>,
pub positions: Vec<ErrPos>,
pub description: Tok<String>,
pub message: Arc<String>,
pub positions: Vec<ErrPos>,
}
impl OrcErr {
fn to_api(&self) -> api::OrcError {
api::OrcError {
description: self.description.to_api(),
message: self.message.clone(),
locations: self.positions.iter().map(ErrPos::to_api).collect(),
}
}
fn from_api(api: &api::OrcError) -> Self {
Self {
description: Tok::from_api(api.description),
message: api.message.clone(),
positions: api.locations.iter().map(ErrPos::from_api).collect(),
}
}
fn to_api(&self) -> api::OrcError {
api::OrcError {
description: self.description.to_api(),
message: self.message.clone(),
locations: self.positions.iter().map(ErrPos::to_api).collect(),
}
}
fn from_api(api: &api::OrcError) -> Self {
Self {
description: Tok::from_api(api.description),
message: api.message.clone(),
positions: api.locations.iter().map(ErrPos::from_api).collect(),
}
}
}
impl Eq for OrcErr {}
impl PartialEq for OrcErr {
fn eq(&self, other: &Self) -> bool { self.description == other.description }
fn eq(&self, other: &Self) -> bool { self.description == other.description }
}
impl From<OrcErr> for Vec<OrcErr> {
fn from(value: OrcErr) -> Self { vec![value] }
fn from(value: OrcErr) -> Self { vec![value] }
}
impl fmt::Display for OrcErr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let pstr = self.positions.iter().map(|p| format!("{p:?}")).join("; ");
write!(f, "{}: {} @ {}", self.description, self.message, pstr)
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let pstr = self.positions.iter().map(|p| format!("{p:?}")).join("; ");
write!(f, "{}: {} @ {}", self.description, self.message, pstr)
}
}
#[derive(Clone, Debug)]
pub struct EmptyErrv;
impl fmt::Display for EmptyErrv {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "OrcErrv must not be empty")
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "OrcErrv must not be empty")
}
}
#[derive(Clone, Debug)]
pub struct OrcErrv(Vec<OrcErr>);
impl OrcErrv {
pub fn new(errors: impl IntoIterator<Item = OrcErr>) -> Result<Self, EmptyErrv> {
let v = errors.into_iter().collect_vec();
if v.is_empty() { Err(EmptyErrv) } else { Ok(Self(v)) }
}
#[must_use]
pub fn extended<T>(mut self, errors: impl IntoIterator<Item = T>) -> Self
where Self: Extend<T> {
self.extend(errors);
self
}
#[must_use]
pub fn len(&self) -> usize { self.0.len() }
#[must_use]
pub fn is_empty(&self) -> bool { self.len() == 0 }
#[must_use]
pub fn any(&self, f: impl FnMut(&OrcErr) -> bool) -> bool { self.0.iter().any(f) }
#[must_use]
pub fn keep_only(self, f: impl FnMut(&OrcErr) -> bool) -> Option<Self> {
let v = self.0.into_iter().filter(f).collect_vec();
if v.is_empty() { None } else { Some(Self(v)) }
}
#[must_use]
  pub fn one(&self) -> Option<&OrcErr> { (self.0.len() == 1).then(|| &self.0[0]) }
pub fn pos_iter(&self) -> impl Iterator<Item = ErrPos> + '_ {
self.0.iter().flat_map(|e| e.positions.iter().cloned())
}
pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect() }
pub fn from_api<'a>(api: impl IntoIterator<Item = &'a api::OrcError>) -> Self {
Self(api.into_iter().map(OrcErr::from_api).collect())
}
pub fn new(errors: impl IntoIterator<Item = OrcErr>) -> Result<Self, EmptyErrv> {
let v = errors.into_iter().collect_vec();
if v.is_empty() { Err(EmptyErrv) } else { Ok(Self(v)) }
}
#[must_use]
pub fn extended<T>(mut self, errors: impl IntoIterator<Item = T>) -> Self
where Self: Extend<T> {
self.extend(errors);
self
}
#[must_use]
pub fn len(&self) -> usize { self.0.len() }
#[must_use]
pub fn is_empty(&self) -> bool { self.len() == 0 }
#[must_use]
pub fn any(&self, f: impl FnMut(&OrcErr) -> bool) -> bool { self.0.iter().any(f) }
#[must_use]
pub fn keep_only(self, f: impl FnMut(&OrcErr) -> bool) -> Option<Self> {
let v = self.0.into_iter().filter(f).collect_vec();
if v.is_empty() { None } else { Some(Self(v)) }
}
#[must_use]
	pub fn one(&self) -> Option<&OrcErr> { (self.0.len() == 1).then(|| &self.0[0]) }
pub fn pos_iter(&self) -> impl Iterator<Item = ErrPos> + '_ {
self.0.iter().flat_map(|e| e.positions.iter().cloned())
}
pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect() }
pub fn from_api<'a>(api: impl IntoIterator<Item = &'a api::OrcError>) -> Self {
Self(api.into_iter().map(OrcErr::from_api).collect())
}
}
impl From<OrcErr> for OrcErrv {
fn from(value: OrcErr) -> Self { Self(vec![value]) }
fn from(value: OrcErr) -> Self { Self(vec![value]) }
}
impl Add for OrcErrv {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output { Self(self.0.into_iter().chain(rhs.0).collect_vec()) }
type Output = Self;
fn add(self, rhs: Self) -> Self::Output { Self(self.0.into_iter().chain(rhs.0).collect_vec()) }
}
impl Extend<OrcErr> for OrcErrv {
fn extend<T: IntoIterator<Item = OrcErr>>(&mut self, iter: T) { self.0.extend(iter) }
fn extend<T: IntoIterator<Item = OrcErr>>(&mut self, iter: T) { self.0.extend(iter) }
}
impl Extend<OrcErrv> for OrcErrv {
fn extend<T: IntoIterator<Item = OrcErrv>>(&mut self, iter: T) {
self.0.extend(iter.into_iter().flatten())
}
fn extend<T: IntoIterator<Item = OrcErrv>>(&mut self, iter: T) {
self.0.extend(iter.into_iter().flatten())
}
}
impl IntoIterator for OrcErrv {
type IntoIter = std::vec::IntoIter<OrcErr>;
type Item = OrcErr;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
type IntoIter = std::vec::IntoIter<OrcErr>;
type Item = OrcErr;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
}
impl fmt::Display for OrcErrv {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.iter().join("\n"))
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.iter().join("\n"))
}
}
pub type OrcRes<T> = Result<T, OrcErrv>;
pub fn mk_err(
description: Tok<String>,
message: impl AsRef<str>,
posv: impl IntoIterator<Item = ErrPos>,
description: Tok<String>,
message: impl AsRef<str>,
posv: impl IntoIterator<Item = ErrPos>,
) -> OrcErr {
OrcErr {
description,
message: Arc::new(message.as_ref().to_string()),
positions: posv.into_iter().collect(),
}
OrcErr {
description,
message: Arc::new(message.as_ref().to_string()),
positions: posv.into_iter().collect(),
}
}
pub fn mk_errv(
description: Tok<String>,
message: impl AsRef<str>,
posv: impl IntoIterator<Item = ErrPos>,
description: Tok<String>,
message: impl AsRef<str>,
posv: impl IntoIterator<Item = ErrPos>,
) -> OrcErrv {
mk_err(description, message, posv).into()
mk_err(description, message, posv).into()
}
pub trait Reporter {
fn report(&self, e: impl Into<OrcErrv>);
fn report(&self, e: impl Into<OrcErrv>);
}
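// A short sketch of raising an error through these helpers; `name_missing`
// stands for an interned description token and `pos` for a resolved origin:
fn _report_example(name_missing: Tok<String>, pos: Pos, rep: &impl Reporter) {
	let positions = [ErrPos::new("referenced here", pos)];
	rep.report(mk_errv(name_missing, "no such name in scope", positions));
}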

View File

@@ -1,67 +1,67 @@
//! Multiple-listener-single-delivery event system.
use std::mem;
use std::sync::mpsc::{self, sync_channel};
use std::sync::Mutex;
use std::sync::mpsc::{self, sync_channel};
struct Reply<T, U> {
resub: bool,
outcome: Result<U, T>,
resub: bool,
outcome: Result<U, T>,
}
struct Listener<T, E> {
sink: mpsc::SyncSender<T>,
source: mpsc::Receiver<Reply<T, E>>,
sink: mpsc::SyncSender<T>,
source: mpsc::Receiver<Reply<T, E>>,
}
pub struct Event<T, U> {
listeners: Mutex<Vec<Listener<T, U>>>,
listeners: Mutex<Vec<Listener<T, U>>>,
}
impl<T, U> Event<T, U> {
pub const fn new() -> Self { Self { listeners: Mutex::new(Vec::new()) } }
pub const fn new() -> Self { Self { listeners: Mutex::new(Vec::new()) } }
pub fn dispatch(&self, mut ev: T) -> Option<U> {
let mut listeners = self.listeners.lock().unwrap();
let mut alt_list = Vec::with_capacity(listeners.len());
mem::swap(&mut *listeners, &mut alt_list);
let mut items = alt_list.into_iter();
while let Some(l) = items.next() {
l.sink.send(ev).unwrap();
let Reply { resub, outcome } = l.source.recv().unwrap();
if resub {
listeners.push(l);
}
match outcome {
Ok(res) => {
listeners.extend(items);
return Some(res);
},
Err(next) => {
ev = next;
},
}
}
None
}
pub fn dispatch(&self, mut ev: T) -> Option<U> {
let mut listeners = self.listeners.lock().unwrap();
let mut alt_list = Vec::with_capacity(listeners.len());
mem::swap(&mut *listeners, &mut alt_list);
let mut items = alt_list.into_iter();
while let Some(l) = items.next() {
l.sink.send(ev).unwrap();
let Reply { resub, outcome } = l.source.recv().unwrap();
if resub {
listeners.push(l);
}
match outcome {
Ok(res) => {
listeners.extend(items);
return Some(res);
},
Err(next) => {
ev = next;
},
}
}
None
}
pub fn get_one<V>(&self, mut filter: impl FnMut(&T) -> bool, f: impl FnOnce(T) -> (U, V)) -> V {
let mut listeners = self.listeners.lock().unwrap();
let (sink, request) = sync_channel(0);
let (response, source) = sync_channel(0);
listeners.push(Listener { sink, source });
mem::drop(listeners);
loop {
let t = request.recv().unwrap();
if filter(&t) {
let (u, v) = f(t);
response.send(Reply { resub: false, outcome: Ok(u) }).unwrap();
return v;
}
response.send(Reply { resub: true, outcome: Err(t) }).unwrap();
}
}
pub fn get_one<V>(&self, mut filter: impl FnMut(&T) -> bool, f: impl FnOnce(T) -> (U, V)) -> V {
let mut listeners = self.listeners.lock().unwrap();
let (sink, request) = sync_channel(0);
let (response, source) = sync_channel(0);
listeners.push(Listener { sink, source });
mem::drop(listeners);
loop {
let t = request.recv().unwrap();
if filter(&t) {
let (u, v) = f(t);
response.send(Reply { resub: false, outcome: Ok(u) }).unwrap();
return v;
}
response.send(Reply { resub: true, outcome: Err(t) }).unwrap();
}
}
}
impl<T, U> Default for Event<T, U> {
fn default() -> Self { Self::new() }
fn default() -> Self { Self::new() }
}
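// A hedged sketch of single-delivery dispatch: a worker thread waits for the
// first even number, every other event falls through to the dispatcher.
fn _event_example() {
	use std::sync::Arc;
	use std::thread;
	let ev = Arc::new(Event::<u32, &'static str>::new());
	let ev2 = ev.clone();
	let worker = thread::spawn(move || ev2.get_one(|n| n % 2 == 0, |_| ("even!", ())));
	assert_eq!(ev.dispatch(7), None); // odd numbers are never claimed
	// Events dispatched before the worker registers come back as None, so retry.
	while ev.dispatch(8) != Some("even!") {
		thread::yield_now();
	}
	worker.join().unwrap();
}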

View File

@@ -6,45 +6,45 @@ use std::sync::{Mutex, MutexGuard, OnceLock};
use hashbrown::HashMap;
pub struct IdStore<T> {
table: OnceLock<Mutex<HashMap<NonZeroU64, T>>>,
id: AtomicU64,
table: OnceLock<Mutex<HashMap<NonZeroU64, T>>>,
id: AtomicU64,
}
impl<T> IdStore<T> {
pub const fn new() -> Self { Self { table: OnceLock::new(), id: AtomicU64::new(1) } }
pub fn add(&self, t: T) -> IdRecord<'_, T> {
let tbl = self.table.get_or_init(Mutex::default);
let mut tbl_g = tbl.lock().unwrap();
let id: NonZeroU64 = self.id.fetch_add(1, Ordering::Relaxed).try_into().unwrap();
assert!(tbl_g.insert(id, t).is_none(), "atom ID wraparound");
IdRecord(id, tbl_g)
}
pub fn get(&self, id: impl Into<NonZeroU64>) -> Option<IdRecord<'_, T>> {
let tbl = self.table.get_or_init(Mutex::default);
let tbl_g = tbl.lock().unwrap();
let id64 = id.into();
if tbl_g.contains_key(&id64) { Some(IdRecord(id64, tbl_g)) } else { None }
}
pub fn is_empty(&self) -> bool { self.len() == 0 }
pub fn len(&self) -> usize { self.table.get().map(|t| t.lock().unwrap().len()).unwrap_or(0) }
pub const fn new() -> Self { Self { table: OnceLock::new(), id: AtomicU64::new(1) } }
pub fn add(&self, t: T) -> IdRecord<'_, T> {
let tbl = self.table.get_or_init(Mutex::default);
let mut tbl_g = tbl.lock().unwrap();
let id: NonZeroU64 = self.id.fetch_add(1, Ordering::Relaxed).try_into().unwrap();
assert!(tbl_g.insert(id, t).is_none(), "atom ID wraparound");
IdRecord(id, tbl_g)
}
pub fn get(&self, id: impl Into<NonZeroU64>) -> Option<IdRecord<'_, T>> {
let tbl = self.table.get_or_init(Mutex::default);
let tbl_g = tbl.lock().unwrap();
let id64 = id.into();
if tbl_g.contains_key(&id64) { Some(IdRecord(id64, tbl_g)) } else { None }
}
pub fn is_empty(&self) -> bool { self.len() == 0 }
pub fn len(&self) -> usize { self.table.get().map(|t| t.lock().unwrap().len()).unwrap_or(0) }
}
impl<T> Default for IdStore<T> {
fn default() -> Self { Self::new() }
fn default() -> Self { Self::new() }
}
pub struct IdRecord<'a, T>(NonZeroU64, MutexGuard<'a, HashMap<NonZeroU64, T>>);
impl<T> IdRecord<'_, T> {
pub fn id(&self) -> NonZeroU64 { self.0 }
pub fn remove(mut self) -> T { self.1.remove(&self.0).unwrap() }
pub fn id(&self) -> NonZeroU64 { self.0 }
pub fn remove(mut self) -> T { self.1.remove(&self.0).unwrap() }
}
impl<T> Deref for IdRecord<'_, T> {
type Target = T;
fn deref(&self) -> &Self::Target {
self.1.get(&self.0).expect("Existence checked on construction")
}
type Target = T;
fn deref(&self) -> &Self::Target {
self.1.get(&self.0).expect("Existence checked on construction")
}
}
impl<T> DerefMut for IdRecord<'_, T> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.1.get_mut(&self.0).expect("Existence checked on construction")
}
fn deref_mut(&mut self) -> &mut Self::Target {
self.1.get_mut(&self.0).expect("Existence checked on construction")
}
}
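// A quick sketch of the store's lifecycle: `add` hands back a guard exposing
// the freshly generated id, which can later be used to fetch or remove the value:
fn _id_store_example() {
	static STORE: IdStore<String> = IdStore::new();
	let id = STORE.add("hello".to_string()).id();
	assert_eq!(STORE.get(id).unwrap().as_str(), "hello");
	assert_eq!(STORE.get(id).unwrap().remove(), "hello");
	assert!(STORE.get(id).is_none());
}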

View File

@@ -2,7 +2,7 @@ use std::borrow::Borrow;
use std::hash::BuildHasher as _;
use std::num::NonZeroU64;
use std::ops::{Deref, DerefMut};
use std::sync::{atomic, Arc, Mutex, MutexGuard};
use std::sync::{Arc, Mutex, MutexGuard, atomic};
use std::{fmt, hash, mem};
use hashbrown::{HashMap, HashSet};
@@ -19,148 +19,149 @@ struct ForceSized<T>(T);
#[derive(Clone)]
pub struct Tok<T: Interned> {
data: Arc<T>,
marker: ForceSized<T::Marker>,
data: Arc<T>,
marker: ForceSized<T::Marker>,
}
impl<T: Interned> Tok<T> {
pub fn new(data: Arc<T>, marker: T::Marker) -> Self { Self { data, marker: ForceSized(marker) } }
pub fn to_api(&self) -> T::Marker { self.marker.0 }
pub fn from_api<M>(marker: M) -> Self where M: InternMarker<Interned = T> {
deintern(marker)
}
pub fn arc(&self) -> Arc<T> { self.data.clone() }
pub fn new(data: Arc<T>, marker: T::Marker) -> Self { Self { data, marker: ForceSized(marker) } }
pub fn to_api(&self) -> T::Marker { self.marker.0 }
pub fn from_api<M>(marker: M) -> Self
where M: InternMarker<Interned = T> {
deintern(marker)
}
pub fn arc(&self) -> Arc<T> { self.data.clone() }
}
impl<T: Interned> Deref for Tok<T> {
type Target = T;
type Target = T;
fn deref(&self) -> &Self::Target { self.data.as_ref() }
fn deref(&self) -> &Self::Target { self.data.as_ref() }
}
impl<T: Interned> Ord for Tok<T> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.to_api().cmp(&other.to_api()) }
fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.to_api().cmp(&other.to_api()) }
}
impl<T: Interned> PartialOrd for Tok<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) }
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) }
}
impl<T: Interned> Eq for Tok<T> {}
impl<T: Interned> PartialEq for Tok<T> {
fn eq(&self, other: &Self) -> bool { self.cmp(other).is_eq() }
fn eq(&self, other: &Self) -> bool { self.cmp(other).is_eq() }
}
impl<T: Interned> hash::Hash for Tok<T> {
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.to_api().hash(state) }
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.to_api().hash(state) }
}
impl<T: Interned + fmt::Display> fmt::Display for Tok<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", &*self.data)
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", &*self.data)
}
}
impl<T: Interned + fmt::Debug> fmt::Debug for Tok<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Token({} -> {:?})", self.to_api().get_id(), self.data.as_ref())
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Token({} -> {:?})", self.to_api().get_id(), self.data.as_ref())
}
}
impl<T: Interned + Encode> Encode for Tok<T> {
fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) { self.data.encode(write) }
fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) { self.data.encode(write) }
}
impl<T: Interned + Decode> Decode for Tok<T> {
fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self { intern(&T::decode(read)) }
fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self { intern(&T::decode(read)) }
}
pub trait Interned: Eq + hash::Hash + Clone + fmt::Debug + Internable<Interned = Self> {
type Marker: InternMarker<Interned = Self> + Sized;
fn intern(
self: Arc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Self::Marker;
fn bimap(interner: &mut TypedInterners) -> &mut Bimap<Self>;
type Marker: InternMarker<Interned = Self> + Sized;
fn intern(
self: Arc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Self::Marker;
fn bimap(interner: &mut TypedInterners) -> &mut Bimap<Self>;
}
pub trait Internable: fmt::Debug {
type Interned: Interned;
fn get_owned(&self) -> Arc<Self::Interned>;
type Interned: Interned;
fn get_owned(&self) -> Arc<Self::Interned>;
}
pub trait InternMarker: Copy + PartialEq + Eq + PartialOrd + Ord + hash::Hash + Sized {
type Interned: Interned<Marker = Self>;
fn resolve(
self,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Tok<Self::Interned>;
fn get_id(self) -> NonZeroU64;
fn from_id(id: NonZeroU64) -> Self;
type Interned: Interned<Marker = Self>;
fn resolve(
self,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Tok<Self::Interned>;
fn get_id(self) -> NonZeroU64;
fn from_id(id: NonZeroU64) -> Self;
}
impl Interned for String {
type Marker = api::TStr;
fn intern(
self: Arc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Self::Marker {
req.request(api::InternStr(self))
}
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.strings }
type Marker = api::TStr;
fn intern(
self: Arc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Self::Marker {
req.request(api::InternStr(self))
}
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.strings }
}
impl InternMarker for api::TStr {
type Interned = String;
fn resolve(
self,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Tok<Self::Interned> {
Tok::new(req.request(api::ExternStr(self)), self)
}
fn get_id(self) -> NonZeroU64 { self.0 }
fn from_id(id: NonZeroU64) -> Self { Self(id) }
type Interned = String;
fn resolve(
self,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Tok<Self::Interned> {
Tok::new(req.request(api::ExternStr(self)), self)
}
fn get_id(self) -> NonZeroU64 { self.0 }
fn from_id(id: NonZeroU64) -> Self { Self(id) }
}
impl Internable for str {
type Interned = String;
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_string()) }
type Interned = String;
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_string()) }
}
impl Internable for String {
type Interned = String;
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_string()) }
type Interned = String;
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_string()) }
}
impl Interned for Vec<Tok<String>> {
type Marker = api::TStrv;
fn intern(
self: Arc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Self::Marker {
req.request(api::InternStrv(Arc::new(self.iter().map(|t| t.to_api()).collect())))
}
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.vecs }
type Marker = api::TStrv;
fn intern(
self: Arc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Self::Marker {
req.request(api::InternStrv(Arc::new(self.iter().map(|t| t.to_api()).collect())))
}
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.vecs }
}
impl InternMarker for api::TStrv {
type Interned = Vec<Tok<String>>;
fn resolve(
self,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Tok<Self::Interned> {
let data =
Arc::new(req.request(api::ExternStrv(self)).iter().map(|m| deintern(*m)).collect_vec());
Tok::new(data, self)
}
fn get_id(self) -> NonZeroU64 { self.0 }
fn from_id(id: NonZeroU64) -> Self { Self(id) }
type Interned = Vec<Tok<String>>;
fn resolve(
self,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Tok<Self::Interned> {
let data =
Arc::new(req.request(api::ExternStrv(self)).iter().map(|m| deintern(*m)).collect_vec());
Tok::new(data, self)
}
fn get_id(self) -> NonZeroU64 { self.0 }
fn from_id(id: NonZeroU64) -> Self { Self(id) }
}
impl Internable for [Tok<String>] {
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_vec()) }
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_vec()) }
}
impl Internable for Vec<Tok<String>> {
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_vec()) }
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_vec()) }
}
impl Internable for Vec<api::TStr> {
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Arc<Self::Interned> {
Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
}
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Arc<Self::Interned> {
Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
}
}
impl Internable for [api::TStr] {
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Arc<Self::Interned> {
Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
}
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Arc<Self::Interned> {
Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
}
}
/// The number of references held to any token by the interner.
@@ -168,138 +169,138 @@ const BASE_RC: usize = 3;
#[test]
fn base_rc_correct() {
let tok = Tok::new(Arc::new("foo".to_string()), api::TStr(1.try_into().unwrap()));
let mut bimap = Bimap::default();
bimap.insert(tok.clone());
assert_eq!(Arc::strong_count(&tok.data), BASE_RC + 1, "the bimap plus the current instance");
let tok = Tok::new(Arc::new("foo".to_string()), api::TStr(1.try_into().unwrap()));
let mut bimap = Bimap::default();
bimap.insert(tok.clone());
assert_eq!(Arc::strong_count(&tok.data), BASE_RC + 1, "the bimap plus the current instance");
}
pub struct Bimap<T: Interned> {
intern: HashMap<Arc<T>, Tok<T>>,
by_id: HashMap<T::Marker, Tok<T>>,
intern: HashMap<Arc<T>, Tok<T>>,
by_id: HashMap<T::Marker, Tok<T>>,
}
impl<T: Interned> Bimap<T> {
pub fn insert(&mut self, token: Tok<T>) {
self.intern.insert(token.data.clone(), token.clone());
self.by_id.insert(token.to_api(), token);
}
pub fn insert(&mut self, token: Tok<T>) {
self.intern.insert(token.data.clone(), token.clone());
self.by_id.insert(token.to_api(), token);
}
pub fn by_marker(&self, marker: T::Marker) -> Option<Tok<T>> { self.by_id.get(&marker).cloned() }
pub fn by_marker(&self, marker: T::Marker) -> Option<Tok<T>> { self.by_id.get(&marker).cloned() }
pub fn by_value<Q: Eq + hash::Hash>(&self, q: &Q) -> Option<Tok<T>>
where T: Borrow<Q> {
(self.intern.raw_entry())
.from_hash(self.intern.hasher().hash_one(q), |k| k.as_ref().borrow() == q)
.map(|p| p.1.clone())
}
pub fn by_value<Q: Eq + hash::Hash>(&self, q: &Q) -> Option<Tok<T>>
where T: Borrow<Q> {
(self.intern.raw_entry())
.from_hash(self.intern.hasher().hash_one(q), |k| k.as_ref().borrow() == q)
.map(|p| p.1.clone())
}
pub fn sweep_replica(&mut self) -> Vec<T::Marker> {
(self.intern)
.extract_if(|k, _| Arc::strong_count(k) == BASE_RC)
.map(|(_, v)| {
self.by_id.remove(&v.to_api());
v.to_api()
})
.collect()
}
pub fn sweep_replica(&mut self) -> Vec<T::Marker> {
(self.intern)
.extract_if(|k, _| Arc::strong_count(k) == BASE_RC)
.map(|(_, v)| {
self.by_id.remove(&v.to_api());
v.to_api()
})
.collect()
}
pub fn sweep_master(&mut self, retained: HashSet<T::Marker>) {
self.intern.retain(|k, v| BASE_RC < Arc::strong_count(k) || retained.contains(&v.to_api()))
}
pub fn sweep_master(&mut self, retained: HashSet<T::Marker>) {
self.intern.retain(|k, v| BASE_RC < Arc::strong_count(k) || retained.contains(&v.to_api()))
}
}
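
BASE_RC in the test above is the number of Arc handles the interner itself keeps for each token, so an entry whose strong count has fallen back to BASE_RC has no users left outside the interner; that is exactly what sweep_replica and sweep_master check. A standalone sketch of the same strong-count eviction idea, using a single map where the baseline count is 1:

use std::collections::HashMap;
use std::sync::Arc;

/// Sketch: a cache that holds one Arc per entry, so strong_count == 1
/// means no caller still holds the value and it can be evicted.
struct Cache {
  entries: HashMap<String, Arc<String>>,
}

impl Cache {
  fn get(&mut self, key: &str) -> Arc<String> {
    self.entries.entry(key.to_string()).or_insert_with(|| Arc::new(key.to_uppercase())).clone()
  }
  fn sweep(&mut self) {
    // Only the map itself references swept entries (count == 1 here, BASE_RC in the real interner).
    self.entries.retain(|_, v| Arc::strong_count(v) > 1);
  }
}

fn main() {
  let mut cache = Cache { entries: HashMap::new() };
  let kept = cache.get("keep");
  let _ = cache.get("drop"); // returned handle dropped immediately
  cache.sweep();
  assert!(cache.entries.contains_key("keep"));
  assert!(!cache.entries.contains_key("drop"));
  drop(kept);
}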
impl<T: Interned> Default for Bimap<T> {
fn default() -> Self { Self { by_id: HashMap::new(), intern: HashMap::new() } }
fn default() -> Self { Self { by_id: HashMap::new(), intern: HashMap::new() } }
}
pub trait UpComm {
fn up<R: Request>(&self, req: R) -> R::Response;
fn up<R: Request>(&self, req: R) -> R::Response;
}
#[derive(Default)]
pub struct TypedInterners {
strings: Bimap<String>,
vecs: Bimap<Vec<Tok<String>>>,
strings: Bimap<String>,
vecs: Bimap<Vec<Tok<String>>>,
}
#[derive(Default)]
pub struct Interner {
interners: TypedInterners,
master: Option<Box<dyn DynRequester<Transfer = api::IntReq>>>,
interners: TypedInterners,
master: Option<Box<dyn DynRequester<Transfer = api::IntReq>>>,
}
static ID: atomic::AtomicU64 = atomic::AtomicU64::new(1);
static INTERNER: Mutex<Option<Interner>> = Mutex::new(None);
pub fn interner() -> impl DerefMut<Target = Interner> {
struct G(MutexGuard<'static, Option<Interner>>);
impl Deref for G {
type Target = Interner;
fn deref(&self) -> &Self::Target { self.0.as_ref().expect("Guard pre-initialized") }
}
impl DerefMut for G {
fn deref_mut(&mut self) -> &mut Self::Target {
self.0.as_mut().expect("Guard pre-initialized")
}
}
let mut g = INTERNER.lock().unwrap();
g.get_or_insert_with(Interner::default);
G(g)
struct G(MutexGuard<'static, Option<Interner>>);
impl Deref for G {
type Target = Interner;
fn deref(&self) -> &Self::Target { self.0.as_ref().expect("Guard pre-initialized") }
}
impl DerefMut for G {
fn deref_mut(&mut self) -> &mut Self::Target {
self.0.as_mut().expect("Guard pre-initialized")
}
}
let mut g = INTERNER.lock().unwrap();
g.get_or_insert_with(Interner::default);
G(g)
}
pub fn init_replica(req: impl DynRequester<Transfer = api::IntReq> + 'static) {
let mut g = INTERNER.lock().unwrap();
assert!(g.is_none(), "Attempted to initialize replica interner after first use");
*g = Some(Interner {
master: Some(Box::new(req)),
interners: TypedInterners { strings: Bimap::default(), vecs: Bimap::default() },
})
let mut g = INTERNER.lock().unwrap();
assert!(g.is_none(), "Attempted to initialize replica interner after first use");
*g = Some(Interner {
master: Some(Box::new(req)),
interners: TypedInterners { strings: Bimap::default(), vecs: Bimap::default() },
})
}
pub fn intern<T: Interned>(t: &(impl Internable<Interned = T> + ?Sized)) -> Tok<T> {
let data = t.get_owned();
let mut g = interner();
let job = format!("{t:?} in {}", if g.master.is_some() { "replica" } else { "master" });
eprintln!("Interning {job}");
let typed = T::bimap(&mut g.interners);
if let Some(tok) = typed.by_value(&data) {
return tok;
}
let marker = match &mut g.master {
Some(c) => data.clone().intern(&**c),
None =>
T::Marker::from_id(NonZeroU64::new(ID.fetch_add(1, atomic::Ordering::Relaxed)).unwrap()),
};
let tok = Tok::new(data, marker);
T::bimap(&mut g.interners).insert(tok.clone());
mem::drop(g);
eprintln!("Interned {job}");
tok
let data = t.get_owned();
let mut g = interner();
let job = format!("{t:?} in {}", if g.master.is_some() { "replica" } else { "master" });
eprintln!("Interning {job}");
let typed = T::bimap(&mut g.interners);
if let Some(tok) = typed.by_value(&data) {
return tok;
}
let marker = match &mut g.master {
Some(c) => data.clone().intern(&**c),
None =>
T::Marker::from_id(NonZeroU64::new(ID.fetch_add(1, atomic::Ordering::Relaxed)).unwrap()),
};
let tok = Tok::new(data, marker);
T::bimap(&mut g.interners).insert(tok.clone());
mem::drop(g);
eprintln!("Interned {job}");
tok
}
fn deintern<M: InternMarker>(marker: M) -> Tok<M::Interned> {
let mut g = interner();
if let Some(tok) = M::Interned::bimap(&mut g.interners).by_marker(marker) {
return tok;
}
let master = g.master.as_mut().expect("ID not in local interner and this is master");
let token = marker.resolve(&**master);
M::Interned::bimap(&mut g.interners).insert(token.clone());
token
let mut g = interner();
if let Some(tok) = M::Interned::bimap(&mut g.interners).by_marker(marker) {
return tok;
}
let master = g.master.as_mut().expect("ID not in local interner and this is master");
let token = marker.resolve(&**master);
M::Interned::bimap(&mut g.interners).insert(token.clone());
token
}
pub fn merge_retained(into: &mut api::Retained, from: &api::Retained) {
into.strings = into.strings.iter().chain(&from.strings).copied().unique().collect();
into.vecs = into.vecs.iter().chain(&from.vecs).copied().unique().collect();
into.strings = into.strings.iter().chain(&from.strings).copied().unique().collect();
into.vecs = into.vecs.iter().chain(&from.vecs).copied().unique().collect();
}
pub fn sweep_replica() -> api::Retained {
let mut g = interner();
assert!(g.master.is_some(), "Not a replica");
api::Retained {
strings: g.interners.strings.sweep_replica(),
vecs: g.interners.vecs.sweep_replica(),
}
let mut g = interner();
assert!(g.master.is_some(), "Not a replica");
api::Retained {
strings: g.interners.strings.sweep_replica(),
vecs: g.interners.vecs.sweep_replica(),
}
}
/// Create a thread-local token instance and copy it. This ensures that the
@@ -308,47 +309,47 @@ pub fn sweep_replica() -> api::Retained {
/// expression (i.e. a literal).
#[macro_export]
macro_rules! intern {
($ty:ty : $expr:expr) => {{
thread_local! {
static VALUE: $crate::interner::Tok<<$ty as $crate::interner::Internable>::Interned>
= $crate::interner::intern::<
<$ty as $crate::interner::Internable>::Interned
>($expr as &$ty);
}
VALUE.with(|v| v.clone())
}};
($ty:ty : $expr:expr) => {{
thread_local! {
static VALUE: $crate::interner::Tok<<$ty as $crate::interner::Internable>::Interned>
= $crate::interner::intern::<
<$ty as $crate::interner::Internable>::Interned
>($expr as &$ty);
}
VALUE.with(|v| v.clone())
}};
}
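
The intern! macro caches the interned token in a thread_local! static, so the interner is only consulted the first time a thread reaches the call site and later uses simply clone the cached token. A self-contained sketch of that caching trick, with a plain String standing in for Tok:

/// Sketch of the `intern!` caching trick: a `thread_local!` static is
/// initialized lazily on first access in each thread, so the expensive
/// expression runs once per thread per call site and later uses just clone it.
fn cached_name() -> String {
  thread_local! {
    static VALUE: String = "an::interned::name".to_string();
  }
  VALUE.with(|v| v.clone())
}

fn main() {
  assert_eq!(cached_name(), "an::interned::name");
  assert_eq!(cached_name(), cached_name());
}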
pub fn sweep_master(retained: api::Retained) {
let mut g = interner();
assert!(g.master.is_none(), "Not master");
g.interners.strings.sweep_master(retained.strings.into_iter().collect());
g.interners.vecs.sweep_master(retained.vecs.into_iter().collect());
let mut g = interner();
assert!(g.master.is_none(), "Not master");
g.interners.strings.sweep_master(retained.strings.into_iter().collect());
g.interners.vecs.sweep_master(retained.vecs.into_iter().collect());
}
#[cfg(test)]
mod test {
use std::num::NonZero;
use std::num::NonZero;
use orchid_api_traits::{enc_vec, Decode};
use orchid_api_traits::{Decode, enc_vec};
use super::*;
use crate::api;
use super::*;
use crate::api;
#[test]
fn test_i() {
let _: Tok<String> = intern!(str: "foo");
let _: Tok<Vec<Tok<String>>> = intern!([Tok<String>]: &[
intern!(str: "bar"),
intern!(str: "baz")
]);
}
#[test]
fn test_i() {
let _: Tok<String> = intern!(str: "foo");
let _: Tok<Vec<Tok<String>>> = intern!([Tok<String>]: &[
intern!(str: "bar"),
intern!(str: "baz")
]);
}
#[test]
fn test_coding() {
let coded = api::TStr(NonZero::new(3u64).unwrap());
let mut enc = &enc_vec(&coded)[..];
api::TStr::decode(&mut enc);
assert_eq!(enc, [], "Did not consume all of {enc:?}")
}
#[test]
fn test_coding() {
let coded = api::TStr(NonZero::new(3u64).unwrap());
let mut enc = &enc_vec(&coded)[..];
api::TStr::decode(&mut enc);
assert_eq!(enc, [], "Did not consume all of {enc:?}")
}
}

View File

@@ -8,38 +8,38 @@ use never::Never;
/// Combine two hashmaps via an infallible value merger. See also
/// [try_join_maps]
pub fn join_maps<K: Eq + Hash, V>(
left: HashMap<K, V>,
right: HashMap<K, V>,
mut merge: impl FnMut(&K, V, V) -> V,
left: HashMap<K, V>,
right: HashMap<K, V>,
mut merge: impl FnMut(&K, V, V) -> V,
) -> HashMap<K, V> {
let (val, ev) = try_join_maps::<K, V, Never>(left, right, |k, l, r| Ok(merge(k, l, r)));
if let Some(e) = ev.first() {
match *e {}
}
val
let (val, ev) = try_join_maps::<K, V, Never>(left, right, |k, l, r| Ok(merge(k, l, r)));
if let Some(e) = ev.first() {
match *e {}
}
val
}
/// Combine two hashmaps via a fallible value merger. See also [join_maps]
pub fn try_join_maps<K: Eq + Hash, V, E>(
left: HashMap<K, V>,
mut right: HashMap<K, V>,
mut merge: impl FnMut(&K, V, V) -> Result<V, E>,
left: HashMap<K, V>,
mut right: HashMap<K, V>,
mut merge: impl FnMut(&K, V, V) -> Result<V, E>,
) -> (HashMap<K, V>, Vec<E>) {
let mut mixed = HashMap::with_capacity(left.len() + right.len());
let mut errors = Vec::new();
for (key, lval) in left {
let val = match right.remove(&key) {
None => lval,
Some(rval) => match merge(&key, lval, rval) {
Ok(v) => v,
Err(e) => {
errors.push(e);
continue;
},
},
};
mixed.insert(key, val);
}
mixed.extend(right);
(mixed, errors)
let mut mixed = HashMap::with_capacity(left.len() + right.len());
let mut errors = Vec::new();
for (key, lval) in left {
let val = match right.remove(&key) {
None => lval,
Some(rval) => match merge(&key, lval, rval) {
Ok(v) => v,
Err(e) => {
errors.push(e);
continue;
},
},
};
mixed.insert(key, val);
}
mixed.extend(right);
(mixed, errors)
}
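
join_maps and try_join_maps fold one map into the other, letting a closure decide what happens on key collisions (and collecting errors in the fallible variant). A self-contained sketch of the same merge, with addition as the collision rule; the names below are illustrative:

use std::collections::HashMap;

/// Standalone sketch of the join_maps technique: merge `right` into `left`,
/// resolving key collisions with a caller-supplied rule.
fn merge_counts(
  left: HashMap<&'static str, u32>,
  mut right: HashMap<&'static str, u32>,
) -> HashMap<&'static str, u32> {
  let mut out = HashMap::with_capacity(left.len() + right.len());
  for (key, lval) in left {
    // On collision the merger decides; here we simply add the counts.
    let val = match right.remove(key) {
      Some(rval) => lval + rval,
      None => lval,
    };
    out.insert(key, val);
  }
  out.extend(right);
  out
}

fn main() {
  let a = HashMap::from([("x", 1), ("y", 2)]);
  let b = HashMap::from([("y", 40), ("z", 3)]);
  let merged = merge_counts(a, b);
  assert_eq!(merged["y"], 42);
  assert_eq!(merged.len(), 3);
}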

View File

@@ -1,18 +1,20 @@
use orchid_api as api;
pub mod box_cow;
pub mod boxed_iter;
pub mod char_filter;
pub mod clone;
pub mod combine;
pub mod event;
pub mod msg;
pub mod box_cow;
pub mod char_filter;
pub mod error;
pub mod event;
pub mod id_store;
pub mod interner;
pub mod join;
pub mod location;
pub mod logging;
pub mod macros;
mod match_mapping;
pub mod msg;
pub mod name;
pub mod number;
pub mod parse;
@@ -22,5 +24,3 @@ pub mod sequence;
pub mod side;
pub mod tokens;
pub mod tree;
pub mod macros;
mod match_mapping;

View File

@@ -1,148 +1,142 @@
//! Structures that show where code or semantic elements came from
use crate::match_mapping;
use std::fmt;
use std::hash::Hash;
use std::ops::Range;
use trait_set::trait_set;
use crate::interner::{intern, Tok};
use crate::interner::{Tok, intern};
use crate::name::Sym;
use crate::{api, intern, sym};
use crate::{api, intern, match_mapping, sym};
trait_set! {
pub trait GetSrc = FnMut(&Sym) -> Tok<String>;
pub trait GetSrc = FnMut(&Sym) -> Tok<String>;
}
#[derive(Debug, Clone)]
pub enum Pos {
None,
SlotTarget,
/// Used in functions to denote generated code that inherits the location
/// of the call. Not allowed in the const tree.
Inherit,
Gen(CodeGenInfo),
/// Range and file
SourceRange(SourceRange),
/// Range only, file implied. Most notably used by parsers
Range(Range<u32>),
None,
SlotTarget,
/// Used in functions to denote generated code that inherits the location
/// of the call. Not allowed in the const tree.
Inherit,
Gen(CodeGenInfo),
/// Range and file
SourceRange(SourceRange),
/// Range only, file implied. Most notably used by parsers
Range(Range<u32>),
}
impl Pos {
pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String {
match self {
Self::Gen(g) => g.to_string(),
Self::SourceRange(sr) => sr.pretty_print(&get_src(&sr.path)),
// Can't pretty print partial and meta-location
other => format!("{other:?}"),
}
}
pub fn from_api(api: &api::Location) -> Self {
match_mapping!(api, api::Location => Pos {
None, Inherit, SlotTarget,
Range(r.clone()),
Gen(cgi => CodeGenInfo::from_api(cgi)),
SourceRange(sr => SourceRange::from_api(sr))
})
}
pub fn to_api(&self) -> api::Location {
match_mapping!(self, Pos => api::Location {
None, Inherit, SlotTarget,
Range(r.clone()),
Gen(cgi.to_api()),
SourceRange(sr.to_api()),
})
}
pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String {
match self {
Self::Gen(g) => g.to_string(),
Self::SourceRange(sr) => sr.pretty_print(&get_src(&sr.path)),
// Can't pretty print partial and meta-location
other => format!("{other:?}"),
}
}
pub fn from_api(api: &api::Location) -> Self {
match_mapping!(api, api::Location => Pos {
None, Inherit, SlotTarget,
Range(r.clone()),
Gen(cgi => CodeGenInfo::from_api(cgi)),
SourceRange(sr => SourceRange::from_api(sr))
})
}
pub fn to_api(&self) -> api::Location {
match_mapping!(self, Pos => api::Location {
None, Inherit, SlotTarget,
Range(r.clone()),
Gen(cgi.to_api()),
SourceRange(sr.to_api()),
})
}
}
/// Exact source code location. Includes where the code was loaded from, what
/// the original source code was, and a byte range.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SourceRange {
pub(crate) path: Sym,
pub(crate) range: Range<u32>,
pub(crate) path: Sym,
pub(crate) range: Range<u32>,
}
impl SourceRange {
pub fn new(range: &Range<u32>, path: &Sym) -> Self {
Self { range: range.clone(), path: path.clone() }
}
/// Create a dud [SourceRange] for testing. Its value is unspecified and
/// volatile.
pub fn mock() -> Self { Self { range: 0..1, path: sym!(test) } }
/// Path the source text was loaded from
pub fn path(&self) -> Sym { self.path.clone() }
/// Byte range
pub fn range(&self) -> Range<u32> { self.range.clone() }
/// 0-based index of first byte
pub fn start(&self) -> u32 { self.range.start }
/// 0-based index of last byte + 1
pub fn end(&self) -> u32 { self.range.end }
/// Syntactic location
pub fn pos(&self) -> Pos { Pos::SourceRange(self.clone()) }
/// Transform the numeric byte range
pub fn map_range(&self, map: impl FnOnce(Range<u32>) -> Range<u32>) -> Self {
Self { range: map(self.range()), path: self.path() }
}
pub fn pretty_print(&self, src: &str) -> String {
let (sl, sc) = pos2lc(src, self.range.start);
let (el, ec) = pos2lc(src, self.range.end);
match (el == sl, ec <= sc + 1) {
(true, true) => format!("{sl}:{sc}"),
(true, false) => format!("{sl}:{sc}..{ec}"),
(false, _) => format!("{sl}:{sc}..{el}:{ec}"),
}
}
pub fn zw(path: Sym, pos: u32) -> Self {
Self { path, range: pos..pos }
}
fn from_api(api: &api::SourceRange) -> Self {
Self { path: Sym::from_api(api.path), range: api.range.clone() }
}
fn to_api(&self) -> api::SourceRange {
api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
}
pub fn new(range: &Range<u32>, path: &Sym) -> Self {
Self { range: range.clone(), path: path.clone() }
}
/// Create a dud [SourceRange] for testing. Its value is unspecified and
/// volatile.
pub fn mock() -> Self { Self { range: 0..1, path: sym!(test) } }
/// Path the source text was loaded from
pub fn path(&self) -> Sym { self.path.clone() }
/// Byte range
pub fn range(&self) -> Range<u32> { self.range.clone() }
/// 0-based index of first byte
pub fn start(&self) -> u32 { self.range.start }
/// 0-based index of last byte + 1
pub fn end(&self) -> u32 { self.range.end }
/// Syntactic location
pub fn pos(&self) -> Pos { Pos::SourceRange(self.clone()) }
/// Transform the numeric byte range
pub fn map_range(&self, map: impl FnOnce(Range<u32>) -> Range<u32>) -> Self {
Self { range: map(self.range()), path: self.path() }
}
pub fn pretty_print(&self, src: &str) -> String {
let (sl, sc) = pos2lc(src, self.range.start);
let (el, ec) = pos2lc(src, self.range.end);
match (el == sl, ec <= sc + 1) {
(true, true) => format!("{sl}:{sc}"),
(true, false) => format!("{sl}:{sc}..{ec}"),
(false, _) => format!("{sl}:{sc}..{el}:{ec}"),
}
}
pub fn zw(path: Sym, pos: u32) -> Self { Self { path, range: pos..pos } }
fn from_api(api: &api::SourceRange) -> Self {
Self { path: Sym::from_api(api.path), range: api.range.clone() }
}
fn to_api(&self) -> api::SourceRange {
api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
}
}
/// Information about a code generator attached to the generated code
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct CodeGenInfo {
/// formatted like a Rust namespace
pub generator: Sym,
/// Unformatted user message with relevant circumstances and parameters
pub details: Tok<String>,
/// formatted like a Rust namespace
pub generator: Sym,
/// Unformatted user message with relevant circumstances and parameters
pub details: Tok<String>,
}
impl CodeGenInfo {
/// A codegen marker with no user message and parameters
pub fn no_details(generator: Sym) -> Self { Self { generator, details: intern!(str: "") } }
/// A codegen marker with a user message or parameters
pub fn details(generator: Sym, details: impl AsRef<str>) -> Self {
Self { generator, details: intern(details.as_ref()) }
}
/// Syntactic location
pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) }
fn from_api(api: &api::CodeGenInfo) -> Self {
Self {
generator: Sym::from_api(api.generator),
details: Tok::from_api(api.details),
}
}
fn to_api(&self) -> api::CodeGenInfo {
api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() }
}
/// A codegen marker with no user message and parameters
pub fn no_details(generator: Sym) -> Self { Self { generator, details: intern!(str: "") } }
/// A codegen marker with a user message or parameters
pub fn details(generator: Sym, details: impl AsRef<str>) -> Self {
Self { generator, details: intern(details.as_ref()) }
}
/// Syntactic location
pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) }
fn from_api(api: &api::CodeGenInfo) -> Self {
Self { generator: Sym::from_api(api.generator), details: Tok::from_api(api.details) }
}
fn to_api(&self) -> api::CodeGenInfo {
api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() }
}
}
impl fmt::Debug for CodeGenInfo {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "CodeGenInfo({self})") }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "CodeGenInfo({self})") }
}
impl fmt::Display for CodeGenInfo {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "generated by {}", self.generator)?;
if !self.details.is_empty() { write!(f, ", details: {}", self.details) } else { write!(f, ".") }
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "generated by {}", self.generator)?;
if !self.details.is_empty() { write!(f, ", details: {}", self.details) } else { write!(f, ".") }
}
}
#[must_use]
fn pos2lc(s: &str, i: u32) -> (u32, u32) {
s.chars()
.take(i.try_into().unwrap())
.fold((1, 1), |(line, col), char| if char == '\n' { (line + 1, 1) } else { (line, col + 1) })
s.chars()
.take(i.try_into().unwrap())
.fold((1, 1), |(line, col), char| if char == '\n' { (line + 1, 1) } else { (line, col + 1) })
}
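
pos2lc converts a character offset into a 1-based line and column by folding over the prefix and resetting the column at every newline. A standalone copy of the idea, with a worked position:

/// Standalone copy of the pos2lc idea: 1-based (line, column) of the i-th char.
fn line_col(s: &str, i: usize) -> (u32, u32) {
  s.chars()
    .take(i)
    .fold((1, 1), |(line, col), c| if c == '\n' { (line + 1, 1) } else { (line, col + 1) })
}

fn main() {
  let src = "let x = 1\nlet y = 2\n";
  // Index 14 points at the 'y' on the second line, column 5.
  assert_eq!(line_col(src, 14), (2, 5));
}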

View File

@@ -1,6 +1,6 @@
use std::fmt::Arguments;
use std::fs::File;
use std::io::{stderr, Write};
use std::io::{Write, stderr};
pub use api::LogStrategy;
use itertools::Itertools;
@@ -10,21 +10,21 @@ use crate::api;
#[derive(Clone)]
pub struct Logger(api::LogStrategy);
impl Logger {
pub fn new(strat: api::LogStrategy) -> Self { Self(strat) }
pub fn log(&self, msg: impl AsRef<str>) { writeln!(self, "{}", msg.as_ref()) }
pub fn strat(&self) -> api::LogStrategy { self.0.clone() }
pub fn log_buf(&self, event: impl AsRef<str>, buf: &[u8]) {
if std::env::var("ORCHID_LOG_BUFFERS").is_ok_and(|v| !v.is_empty()) {
writeln!(self, "{}: [{}]", event.as_ref(), buf.iter().map(|b| format!("{b:02x}")).join(" "))
}
}
pub fn write_fmt(&self, fmt: Arguments) {
match &self.0 {
api::LogStrategy::StdErr => stderr().write_fmt(fmt).expect("Could not write to stderr!"),
api::LogStrategy::File(f) => {
let mut file = File::open(f).expect("Could not open logfile");
file.write_fmt(fmt).expect("Could not write to logfile");
},
}
}
pub fn new(strat: api::LogStrategy) -> Self { Self(strat) }
pub fn log(&self, msg: impl AsRef<str>) { writeln!(self, "{}", msg.as_ref()) }
pub fn strat(&self) -> api::LogStrategy { self.0.clone() }
pub fn log_buf(&self, event: impl AsRef<str>, buf: &[u8]) {
if std::env::var("ORCHID_LOG_BUFFERS").is_ok_and(|v| !v.is_empty()) {
writeln!(self, "{}: [{}]", event.as_ref(), buf.iter().map(|b| format!("{b:02x}")).join(" "))
}
}
pub fn write_fmt(&self, fmt: Arguments) {
match &self.0 {
api::LogStrategy::StdErr => stderr().write_fmt(fmt).expect("Could not write to stderr!"),
api::LogStrategy::File(f) => {
let mut file = File::open(f).expect("Could not open logfile");
file.write_fmt(fmt).expect("Could not write to logfile");
},
}
}
}
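
One caveat in the File branch above: std::fs::File::open opens a file read-only, so the write would fail at runtime; appending through OpenOptions is the usual approach. A minimal sketch under that assumption (the log path and message below are made up):

use std::fs::OpenOptions;
use std::io::Write;

fn main() -> std::io::Result<()> {
  // Open (or create) the logfile for appending instead of read-only access.
  let mut file = OpenOptions::new().create(true).append(true).open("orchid.log")?;
  writeln!(file, "extension started")?;
  file.flush()
}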

View File

@@ -13,84 +13,84 @@ use crate::{api, match_mapping};
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct MacroSlot<'a>(api::MacroTreeId, PhantomData<&'a ()>);
impl MacroSlot<'_> {
pub fn id(self) -> api::MacroTreeId { self.0 }
pub fn id(self) -> api::MacroTreeId { self.0 }
}
trait_set! {
pub trait MacroAtomToApi<A> = FnMut(&A) -> api::MacroToken;
pub trait MacroAtomFromApi<'a, A> = FnMut(&api::Atom) -> MTok<'a, A>;
pub trait MacroAtomToApi<A> = FnMut(&A) -> api::MacroToken;
pub trait MacroAtomFromApi<'a, A> = FnMut(&api::Atom) -> MTok<'a, A>;
}
#[derive(Clone, Debug)]
pub struct MTree<'a, A> {
pub pos: Pos,
pub tok: Arc<MTok<'a, A>>,
pub pos: Pos,
pub tok: Arc<MTok<'a, A>>,
}
impl<'a, A> MTree<'a, A> {
pub(crate) fn from_api(api: &api::MacroTree, do_atom: &mut impl MacroAtomFromApi<'a, A>) -> Self {
Self { pos: Pos::from_api(&api.location), tok: Arc::new(MTok::from_api(&api.token, do_atom)) }
}
pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroTree {
api::MacroTree { location: self.pos.to_api(), token: self.tok.to_api(do_atom) }
}
pub(crate) fn from_api(api: &api::MacroTree, do_atom: &mut impl MacroAtomFromApi<'a, A>) -> Self {
Self { pos: Pos::from_api(&api.location), tok: Arc::new(MTok::from_api(&api.token, do_atom)) }
}
pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroTree {
api::MacroTree { location: self.pos.to_api(), token: self.tok.to_api(do_atom) }
}
}
#[derive(Clone, Debug)]
pub enum MTok<'a, A> {
S(Paren, Vec<MTree<'a, A>>),
Name(Sym),
Slot(MacroSlot<'a>),
Lambda(Vec<MTree<'a, A>>, Vec<MTree<'a, A>>),
Ph(Ph),
Atom(A),
/// Used in extensions to directly return input
Ref(Arc<MTok<'a, Never>>),
/// Used in the matcher to skip previous macro output which can only go in
/// vectorial placeholders
Done(Arc<MTok<'a, A>>),
S(Paren, Vec<MTree<'a, A>>),
Name(Sym),
Slot(MacroSlot<'a>),
Lambda(Vec<MTree<'a, A>>, Vec<MTree<'a, A>>),
Ph(Ph),
Atom(A),
/// Used in extensions to directly return input
Ref(Arc<MTok<'a, Never>>),
/// Used in the matcher to skip previous macro output which can only go in
/// vectorial placeholders
Done(Arc<MTok<'a, A>>),
}
impl<'a, A> MTok<'a, A> {
pub(crate) fn from_api(
api: &api::MacroToken,
do_atom: &mut impl MacroAtomFromApi<'a, A>,
) -> Self {
match_mapping!(&api, api::MacroToken => MTok::<'a, A> {
Lambda(x => mtreev_from_api(x, do_atom), b => mtreev_from_api(b, do_atom)),
Name(t => Sym::from_api(*t)),
Slot(tk => MacroSlot(*tk, PhantomData)),
S(p.clone(), b => mtreev_from_api(b, do_atom)),
Ph(ph => Ph::from_api(ph)),
} {
api::MacroToken::Atom(a) => do_atom(a)
})
}
pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroToken {
fn sink(n: &Never) -> api::MacroToken { match *n {} }
match_mapping!(&self, MTok => api::MacroToken {
Lambda(x => mtreev_to_api(x, do_atom), b => mtreev_to_api(b, do_atom)),
Name(t.tok().to_api()),
Ph(ph.to_api()),
S(p.clone(), b => mtreev_to_api(b, do_atom)),
Slot(tk.0.clone()),
} {
MTok::Ref(r) => r.to_api(&mut sink),
MTok::Done(t) => t.to_api(do_atom),
MTok::Atom(a) => do_atom(a),
})
}
pub fn at(self, pos: Pos) -> MTree<'a, A> { MTree { pos, tok: Arc::new(self) } }
pub(crate) fn from_api(
api: &api::MacroToken,
do_atom: &mut impl MacroAtomFromApi<'a, A>,
) -> Self {
match_mapping!(&api, api::MacroToken => MTok::<'a, A> {
Lambda(x => mtreev_from_api(x, do_atom), b => mtreev_from_api(b, do_atom)),
Name(t => Sym::from_api(*t)),
Slot(tk => MacroSlot(*tk, PhantomData)),
S(p.clone(), b => mtreev_from_api(b, do_atom)),
Ph(ph => Ph::from_api(ph)),
} {
api::MacroToken::Atom(a) => do_atom(a)
})
}
pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroToken {
fn sink(n: &Never) -> api::MacroToken { match *n {} }
match_mapping!(&self, MTok => api::MacroToken {
Lambda(x => mtreev_to_api(x, do_atom), b => mtreev_to_api(b, do_atom)),
Name(t.tok().to_api()),
Ph(ph.to_api()),
S(p.clone(), b => mtreev_to_api(b, do_atom)),
Slot(tk.0.clone()),
} {
MTok::Ref(r) => r.to_api(&mut sink),
MTok::Done(t) => t.to_api(do_atom),
MTok::Atom(a) => do_atom(a),
})
}
pub fn at(self, pos: Pos) -> MTree<'a, A> { MTree { pos, tok: Arc::new(self) } }
}
pub fn mtreev_from_api<'a, 'b, A>(
api: impl IntoIterator<Item = &'b api::MacroTree>,
do_atom: &mut impl MacroAtomFromApi<'a, A>,
api: impl IntoIterator<Item = &'b api::MacroTree>,
do_atom: &mut impl MacroAtomFromApi<'a, A>,
) -> Vec<MTree<'a, A>> {
api.into_iter().map(|api| MTree::from_api(api, do_atom)).collect_vec()
api.into_iter().map(|api| MTree::from_api(api, do_atom)).collect_vec()
}
pub fn mtreev_to_api<'a: 'b, 'b, A: 'b>(
v: impl IntoIterator<Item = &'b MTree<'a, A>>,
do_atom: &mut impl MacroAtomToApi<A>,
v: impl IntoIterator<Item = &'b MTree<'a, A>>,
do_atom: &mut impl MacroAtomToApi<A>,
) -> Vec<api::MacroTree> {
v.into_iter().map(|t| t.to_api(do_atom)).collect_vec()
v.into_iter().map(|t| t.to_api(do_atom)).collect_vec()
}

View File

@@ -1,7 +1,7 @@
/// A shorthand for mapping over enums with identical structure. Used for converting between
/// owned enums and the corresponding API enums that only differ in the type of their
/// fields.
///
/// A shorthand for mapping over enums with identical structure. Used for
/// converting between owned enums and the corresponding API enums that only
/// differ in the type of their fields.
///
/// The basic form is
/// ```ignore
/// match_mapping!(self, ThisType => OtherType {
@@ -78,7 +78,7 @@ macro_rules! match_mapping {
(@PAT_MUNCH $ctx:tt ($($names:ident)*) $name:ident () $value:expr , $($tail:tt)*) => {
match_mapping!(@PAT_MUNCH $ctx ($($names)* $name) $($tail)*)
};
(@PAT_MUNCH $ctx:tt ($($names:ident)*) $name:ident . $($tail:tt)*) => {
(@PAT_MUNCH $ctx:tt ($($names:ident)*) $name:ident . $($tail:tt)*) => {
match_mapping!(@PAT_DOT_MUNCH $ctx ($($names)* $name) $($tail)*)
};
(@PAT_DOT_MUNCH $ctx:tt $names:tt , $($tail:tt)*) => {
@@ -122,4 +122,4 @@ macro_rules! match_mapping {
(@VAL_MUNCH ({} ($($prefix:tt)*)) ($( ( $name:ident $($value:tt)* ) )*) ) => {
$($prefix)* { $( $name : $($value)* ),* }
};
}
}

View File

@@ -3,14 +3,14 @@ use std::io;
use orchid_api_traits::{Decode, Encode};
pub fn send_msg(write: &mut impl io::Write, msg: &[u8]) -> io::Result<()> {
u32::try_from(msg.len()).unwrap().encode(write);
write.write_all(msg)?;
write.flush()
u32::try_from(msg.len()).unwrap().encode(write);
write.write_all(msg)?;
write.flush()
}
pub fn recv_msg(read: &mut impl io::Read) -> io::Result<Vec<u8>> {
let len = u32::decode(read);
let mut msg = vec![0u8; len as usize];
read.read_exact(&mut msg)?;
Ok(msg)
let len = u32::decode(read);
let mut msg = vec![0u8; len as usize];
read.read_exact(&mut msg)?;
Ok(msg)
}
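
send_msg and recv_msg implement plain length-prefixed framing: a u32 header encoded through orchid_api_traits, followed by the payload. A self-contained round-trip sketch of the same framing; the little-endian header here is an assumption made for the sketch, not necessarily the encoding those traits use:

use std::io::{self, Cursor, Read, Write};

/// Length-prefixed framing: a u32 length header followed by the payload.
fn send_frame(write: &mut impl Write, msg: &[u8]) -> io::Result<()> {
  write.write_all(&u32::try_from(msg.len()).unwrap().to_le_bytes())?;
  write.write_all(msg)?;
  write.flush()
}

fn recv_frame(read: &mut impl Read) -> io::Result<Vec<u8>> {
  let mut len = [0u8; 4];
  read.read_exact(&mut len)?;
  let mut msg = vec![0u8; u32::from_le_bytes(len) as usize];
  read.read_exact(&mut msg)?;
  Ok(msg)
}

fn main() -> io::Result<()> {
  let mut buf = Vec::new();
  send_frame(&mut buf, b"ping")?;
  let mut cursor = Cursor::new(buf);
  assert_eq!(recv_frame(&mut cursor)?, b"ping".to_vec());
  Ok(())
}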

View File

@@ -12,11 +12,11 @@ use itertools::Itertools;
use trait_set::trait_set;
use crate::api;
use crate::interner::{intern, InternMarker, Tok};
use crate::interner::{InternMarker, Tok, intern};
trait_set! {
/// Traits that all name iterators should implement
pub trait NameIter = Iterator<Item = Tok<String>> + DoubleEndedIterator + ExactSizeIterator;
/// Traits that all name iterators should implement
pub trait NameIter = Iterator<Item = Tok<String>> + DoubleEndedIterator + ExactSizeIterator;
}
/// A borrowed name fragment which can be empty. See [VPath] for the owned
@@ -25,129 +25,129 @@ trait_set! {
#[repr(transparent)]
pub struct PathSlice([Tok<String>]);
impl PathSlice {
/// Create a new [PathSlice]
pub fn new(slice: &[Tok<String>]) -> &PathSlice {
// SAFETY: This is ok because PathSlice is #[repr(transparent)]
unsafe { &*(slice as *const [Tok<String>] as *const PathSlice) }
}
/// Convert to an owned name fragment
pub fn to_vpath(&self) -> VPath { VPath(self.0.to_vec()) }
/// Iterate over the tokens
pub fn iter(&self) -> impl NameIter + '_ { self.into_iter() }
/// Iterate over the segments
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> {
Box::new(self.0.iter().map(|s| s.as_str()))
}
/// Find the longest shared prefix of this name and another sequence
pub fn coprefix<'a>(&'a self, other: &PathSlice) -> &'a PathSlice {
&self[0..self.iter().zip(other.iter()).take_while(|(l, r)| l == r).count() as u16]
}
/// Find the longest shared suffix of this name and another sequence
pub fn cosuffix<'a>(&'a self, other: &PathSlice) -> &'a PathSlice {
&self[0..self.iter().zip(other.iter()).take_while(|(l, r)| l == r).count() as u16]
}
/// Remove another path from the start of this one, if it is a prefix
pub fn strip_prefix<'a>(&'a self, other: &PathSlice) -> Option<&'a PathSlice> {
let shared = self.coprefix(other).len();
(shared == other.len()).then_some(PathSlice::new(&self[shared..]))
}
/// Number of path segments
pub fn len(&self) -> u16 { self.0.len().try_into().expect("Too long name!") }
pub fn get<I: NameIndex>(&self, index: I) -> Option<&I::Output> { index.get(self) }
/// Whether there are no path segments; in other words, whether this is not
/// a valid name
pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Obtain a reference to the held slice. With all indexing traits shadowed,
/// this is better done explicitly
pub fn as_slice(&self) -> &[Tok<String>] { self }
/// Global empty path slice
pub fn empty() -> &'static Self { PathSlice::new(&[]) }
/// Create a new [PathSlice]
pub fn new(slice: &[Tok<String>]) -> &PathSlice {
// SAFETY: This is ok because PathSlice is #[repr(transparent)]
unsafe { &*(slice as *const [Tok<String>] as *const PathSlice) }
}
/// Convert to an owned name fragment
pub fn to_vpath(&self) -> VPath { VPath(self.0.to_vec()) }
/// Iterate over the tokens
pub fn iter(&self) -> impl NameIter + '_ { self.into_iter() }
/// Iterate over the segments
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> {
Box::new(self.0.iter().map(|s| s.as_str()))
}
/// Find the longest shared prefix of this name and another sequence
pub fn coprefix<'a>(&'a self, other: &PathSlice) -> &'a PathSlice {
&self[0..self.iter().zip(other.iter()).take_while(|(l, r)| l == r).count() as u16]
}
/// Find the longest shared suffix of this name and another sequence
pub fn cosuffix<'a>(&'a self, other: &PathSlice) -> &'a PathSlice {
&self[0..self.iter().zip(other.iter()).take_while(|(l, r)| l == r).count() as u16]
}
/// Remove another path from the start of this one, if it is a prefix
pub fn strip_prefix<'a>(&'a self, other: &PathSlice) -> Option<&'a PathSlice> {
let shared = self.coprefix(other).len();
(shared == other.len()).then_some(PathSlice::new(&self[shared..]))
}
/// Number of path segments
pub fn len(&self) -> u16 { self.0.len().try_into().expect("Too long name!") }
pub fn get<I: NameIndex>(&self, index: I) -> Option<&I::Output> { index.get(self) }
/// Whether there are no path segments; in other words, whether this is not
/// a valid name
pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Obtain a reference to the held slice. With all indexing traits shadowed,
/// this is better done explicitly
pub fn as_slice(&self) -> &[Tok<String>] { self }
/// Global empty path slice
pub fn empty() -> &'static Self { PathSlice::new(&[]) }
}
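
PathSlice::new above uses the borrowed-newtype-over-a-slice pattern: #[repr(transparent)] guarantees the wrapper has the same layout as the underlying slice, which is what makes the raw-pointer cast sound. A standalone sketch of the same pattern over u32 segments:

/// A borrowed newtype over a slice; repr(transparent) guarantees the same
/// layout as [u32], which is what makes the pointer cast in `new` sound.
#[repr(transparent)]
struct Segments([u32]);

impl Segments {
  fn new(slice: &[u32]) -> &Segments {
    // SAFETY: Segments is repr(transparent) over [u32].
    unsafe { &*(slice as *const [u32] as *const Segments) }
  }
  fn len(&self) -> usize { self.0.len() }
}

fn main() {
  let raw = [1u32, 2, 3];
  let segs = Segments::new(&raw);
  assert_eq!(segs.len(), 3);
}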
impl fmt::Debug for PathSlice {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
}
impl fmt::Display for PathSlice {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::"))
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::"))
}
}
impl Borrow<[Tok<String>]> for PathSlice {
fn borrow(&self) -> &[Tok<String>] { &self.0 }
fn borrow(&self) -> &[Tok<String>] { &self.0 }
}
impl<'a> IntoIterator for &'a PathSlice {
type IntoIter = Cloned<slice::Iter<'a, Tok<String>>>;
type Item = Tok<String>;
fn into_iter(self) -> Self::IntoIter { self.0.iter().cloned() }
type IntoIter = Cloned<slice::Iter<'a, Tok<String>>>;
type Item = Tok<String>;
fn into_iter(self) -> Self::IntoIter { self.0.iter().cloned() }
}
pub trait NameIndex {
type Output: ?Sized;
fn get(self, name: &PathSlice) -> Option<&Self::Output>;
type Output: ?Sized;
fn get(self, name: &PathSlice) -> Option<&Self::Output>;
}
impl<T: NameIndex> Index<T> for PathSlice {
type Output = T::Output;
fn index(&self, index: T) -> &Self::Output { index.get(self).expect("Index out of bounds") }
type Output = T::Output;
fn index(&self, index: T) -> &Self::Output { index.get(self).expect("Index out of bounds") }
}
mod idx_impls {
use std::ops;
use std::ops;
use super::{conv_range, NameIndex, PathSlice};
use crate::interner::Tok;
use super::{NameIndex, PathSlice, conv_range};
use crate::interner::Tok;
impl NameIndex for u16 {
type Output = Tok<String>;
fn get(self, name: &PathSlice) -> Option<&Self::Output> { name.0.get(self as usize) }
}
impl NameIndex for u16 {
type Output = Tok<String>;
fn get(self, name: &PathSlice) -> Option<&Self::Output> { name.0.get(self as usize) }
}
impl NameIndex for ops::RangeFull {
type Output = PathSlice;
fn get(self, name: &PathSlice) -> Option<&Self::Output> { Some(name) }
}
impl NameIndex for ops::RangeFull {
type Output = PathSlice;
fn get(self, name: &PathSlice) -> Option<&Self::Output> { Some(name) }
}
macro_rules! impl_range_index_for_pathslice {
($range:ident) => {
impl ops::Index<ops::$range<u16>> for PathSlice {
type Output = Self;
fn index(&self, index: ops::$range<u16>) -> &Self::Output {
Self::new(&self.0[conv_range::<u16, usize>(index)])
}
}
};
}
macro_rules! impl_range_index_for_pathslice {
($range:ident) => {
impl ops::Index<ops::$range<u16>> for PathSlice {
type Output = Self;
fn index(&self, index: ops::$range<u16>) -> &Self::Output {
Self::new(&self.0[conv_range::<u16, usize>(index)])
}
}
};
}
impl_range_index_for_pathslice!(RangeFrom);
impl_range_index_for_pathslice!(RangeTo);
impl_range_index_for_pathslice!(Range);
impl_range_index_for_pathslice!(RangeInclusive);
impl_range_index_for_pathslice!(RangeToInclusive);
impl_range_index_for_pathslice!(RangeFrom);
impl_range_index_for_pathslice!(RangeTo);
impl_range_index_for_pathslice!(Range);
impl_range_index_for_pathslice!(RangeInclusive);
impl_range_index_for_pathslice!(RangeToInclusive);
}
impl Deref for PathSlice {
type Target = [Tok<String>];
type Target = [Tok<String>];
fn deref(&self) -> &Self::Target { &self.0 }
fn deref(&self) -> &Self::Target { &self.0 }
}
impl Borrow<PathSlice> for [Tok<String>] {
fn borrow(&self) -> &PathSlice { PathSlice::new(self) }
fn borrow(&self) -> &PathSlice { PathSlice::new(self) }
}
impl<const N: usize> Borrow<PathSlice> for [Tok<String>; N] {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self[..]) }
fn borrow(&self) -> &PathSlice { PathSlice::new(&self[..]) }
}
impl Borrow<PathSlice> for Vec<Tok<String>> {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self[..]) }
fn borrow(&self) -> &PathSlice { PathSlice::new(&self[..]) }
}
pub fn conv_bound<T: Into<U> + Clone, U>(bound: Bound<&T>) -> Bound<U> {
match bound {
Bound::Included(i) => Bound::Included(i.clone().into()),
Bound::Excluded(i) => Bound::Excluded(i.clone().into()),
Bound::Unbounded => Bound::Unbounded,
}
match bound {
Bound::Included(i) => Bound::Included(i.clone().into()),
Bound::Excluded(i) => Bound::Excluded(i.clone().into()),
Bound::Unbounded => Bound::Unbounded,
}
}
pub fn conv_range<'a, T: Into<U> + Clone + 'a, U: 'a>(
range: impl RangeBounds<T>,
range: impl RangeBounds<T>,
) -> (Bound<U>, Bound<U>) {
(conv_bound(range.start_bound()), conv_bound(range.end_bound()))
(conv_bound(range.start_bound()), conv_bound(range.end_bound()))
}
/// A token path which may be empty. [VName] is the non-empty,
@@ -155,90 +155,90 @@ pub fn conv_range<'a, T: Into<U> + Clone + 'a, U: 'a>(
#[derive(Clone, Default, Hash, PartialEq, Eq)]
pub struct VPath(pub Vec<Tok<String>>);
impl VPath {
/// Collect segments into a vector
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().collect())
}
/// Number of path segments
pub fn len(&self) -> usize { self.0.len() }
/// Whether there are no path segments; in other words, whether this is not
/// a valid name
pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Prepend some tokens to the path
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().chain(self.0).collect())
}
/// Append some tokens to the path
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(self.0.into_iter().chain(items).collect())
}
/// Partition the string by `::` namespace separators
pub fn parse(s: &str) -> Self {
Self(if s.is_empty() { vec![] } else { s.split("::").map(intern).collect() })
}
/// Walk over the segments
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> {
Box::new(self.0.iter().map(|s| s.as_str()))
}
/// Try to convert into non-empty version
pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) }
/// Add a token to the path. Since we now know that it can't be empty, turn it
/// into a name.
pub fn name_with_prefix(self, name: Tok<String>) -> VName {
VName(self.into_iter().chain([name]).collect())
}
/// Add a token to the beginning of the path. Since we now know that it can't be
/// empty, turn it into a name.
pub fn name_with_suffix(self, name: Tok<String>) -> VName {
VName([name].into_iter().chain(self).collect())
}
/// Collect segments into a vector
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().collect())
}
/// Number of path segments
pub fn len(&self) -> usize { self.0.len() }
/// Whether there are no path segments; in other words, whether this is not
/// a valid name
pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Prepend some tokens to the path
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().chain(self.0).collect())
}
/// Append some tokens to the path
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(self.0.into_iter().chain(items).collect())
}
/// Partition the string by `::` namespace separators
pub fn parse(s: &str) -> Self {
Self(if s.is_empty() { vec![] } else { s.split("::").map(intern).collect() })
}
/// Walk over the segments
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> {
Box::new(self.0.iter().map(|s| s.as_str()))
}
/// Try to convert into non-empty version
pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) }
/// Add a token to the path. Since we now know that it can't be empty, turn it
/// into a name.
pub fn name_with_prefix(self, name: Tok<String>) -> VName {
VName(self.into_iter().chain([name]).collect())
}
/// Add a token to the beginning of the path. Since we now know that it can't be
/// empty, turn it into a name.
pub fn name_with_suffix(self, name: Tok<String>) -> VName {
VName([name].into_iter().chain(self).collect())
}
/// Convert a fs path to a vpath
pub fn from_path(path: &Path) -> Option<(Self, bool)> {
let to_vpath =
|p: &Path| p.iter().map(|c| c.to_str().map(intern)).collect::<Option<_>>().map(VPath);
match path.extension().map(|s| s.to_str()) {
Some(Some("orc")) => Some((to_vpath(&path.with_extension(""))?, true)),
None => Some((to_vpath(path)?, false)),
Some(_) => None,
}
}
/// Convert a fs path to a vpath
pub fn from_path(path: &Path) -> Option<(Self, bool)> {
let to_vpath =
|p: &Path| p.iter().map(|c| c.to_str().map(intern)).collect::<Option<_>>().map(VPath);
match path.extension().map(|s| s.to_str()) {
Some(Some("orc")) => Some((to_vpath(&path.with_extension(""))?, true)),
None => Some((to_vpath(path)?, false)),
Some(_) => None,
}
}
}
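
from_path above maps `.orc` files to the segments of their stem (flagged true) and extensionless paths to directory segments (flagged false), rejecting any other extension. A self-contained sketch of that rule with plain Strings instead of interned tokens; the example paths are made up:

use std::path::Path;

/// Sketch of the from_path rule above: `.orc` files map to their stem's
/// segments (true = source file), extensionless paths map as directories.
fn path_segments(path: &Path) -> Option<(Vec<String>, bool)> {
  let segs = |p: &Path| p.iter().map(|c| c.to_str().map(str::to_string)).collect::<Option<Vec<_>>>();
  match path.extension().map(|s| s.to_str()) {
    Some(Some("orc")) => Some((segs(&path.with_extension(""))?, true)),
    None => Some((segs(path)?, false)),
    Some(_) => None,
  }
}

fn main() {
  let (segs, is_src) = path_segments(Path::new("std/number.orc")).unwrap();
  assert_eq!(segs, ["std", "number"]);
  assert!(is_src);
  assert!(path_segments(Path::new("notes.txt")).is_none());
}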
impl fmt::Debug for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
}
impl fmt::Display for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::"))
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::"))
}
}
impl FromIterator<Tok<String>> for VPath {
fn from_iter<T: IntoIterator<Item = Tok<String>>>(iter: T) -> Self {
Self(iter.into_iter().collect())
}
fn from_iter<T: IntoIterator<Item = Tok<String>>>(iter: T) -> Self {
Self(iter.into_iter().collect())
}
}
impl IntoIterator for VPath {
type Item = Tok<String>;
type IntoIter = vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
type Item = Tok<String>;
type IntoIter = vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
}
impl Borrow<[Tok<String>]> for VPath {
fn borrow(&self) -> &[Tok<String>] { self.0.borrow() }
fn borrow(&self) -> &[Tok<String>] { self.0.borrow() }
}
impl Borrow<PathSlice> for VPath {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
}
impl Deref for VPath {
type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() }
type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() }
}
impl<T> Index<T> for VPath
where PathSlice: Index<T>
{
type Output = <PathSlice as Index<T>>::Output;
type Output = <PathSlice as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &Borrow::<PathSlice>::borrow(self)[index] }
fn index(&self, index: T) -> &Self::Output { &Borrow::<PathSlice>::borrow(self)[index] }
}
/// A mutable representation of a namespaced identifier of at least one segment.
@@ -250,71 +250,71 @@ where PathSlice: Index<T>
#[derive(Clone, Hash, PartialEq, Eq)]
pub struct VName(Vec<Tok<String>>);
impl VName {
/// Assert that the sequence isn't empty and wrap it in [VName] to represent
/// this invariant
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> {
let data: Vec<_> = items.into_iter().collect();
if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) }
}
pub fn deintern(items: impl IntoIterator<Item = api::TStr>) -> Result<Self, EmptyNameError> {
Self::new(items.into_iter().map(Tok::from_api))
}
/// Unwrap the enclosed vector
pub fn into_vec(self) -> Vec<Tok<String>> { self.0 }
/// Get a reference to the enclosed vector
pub fn vec(&self) -> &Vec<Tok<String>> { &self.0 }
/// Mutable access to the underlying vector. To ensure correct results, this
/// must never be empty.
pub fn vec_mut(&mut self) -> &mut Vec<Tok<String>> { &mut self.0 }
/// Intern the name and return a [Sym]
pub fn to_sym(&self) -> Sym { Sym(intern(&self.0[..])) }
/// If this name has only one segment, return it
pub fn as_root(&self) -> Option<Tok<String>> { self.0.iter().exactly_one().ok().cloned() }
/// Prepend the segments to this name
#[must_use = "This is a pure function"]
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().chain(self.0).collect())
}
/// Append the segments to this name
#[must_use = "This is a pure function"]
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(self.0.into_iter().chain(items).collect())
}
/// Read a `::` separated namespaced name
pub fn parse(s: &str) -> Result<Self, EmptyNameError> { Self::new(VPath::parse(s)) }
pub fn literal(s: &'static str) -> Self { Self::parse(s).expect("empty literal !?") }
/// Obtain an iterator over the segments of the name
pub fn iter(&self) -> impl Iterator<Item = Tok<String>> + '_ { self.0.iter().cloned() }
/// Assert that the sequence isn't empty and wrap it in [VName] to represent
/// this invariant
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> {
let data: Vec<_> = items.into_iter().collect();
if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) }
}
pub fn deintern(items: impl IntoIterator<Item = api::TStr>) -> Result<Self, EmptyNameError> {
Self::new(items.into_iter().map(Tok::from_api))
}
/// Unwrap the enclosed vector
pub fn into_vec(self) -> Vec<Tok<String>> { self.0 }
/// Get a reference to the enclosed vector
pub fn vec(&self) -> &Vec<Tok<String>> { &self.0 }
/// Mutable access to the underlying vector. To ensure correct results, this
/// must never be empty.
pub fn vec_mut(&mut self) -> &mut Vec<Tok<String>> { &mut self.0 }
/// Intern the name and return a [Sym]
pub fn to_sym(&self) -> Sym { Sym(intern(&self.0[..])) }
/// If this name has only one segment, return it
pub fn as_root(&self) -> Option<Tok<String>> { self.0.iter().exactly_one().ok().cloned() }
/// Prepend the segments to this name
#[must_use = "This is a pure function"]
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(items.into_iter().chain(self.0).collect())
}
/// Append the segments to this name
#[must_use = "This is a pure function"]
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
Self(self.0.into_iter().chain(items).collect())
}
/// Read a `::` separated namespaced name
pub fn parse(s: &str) -> Result<Self, EmptyNameError> { Self::new(VPath::parse(s)) }
pub fn literal(s: &'static str) -> Self { Self::parse(s).expect("empty literal !?") }
/// Obtain an iterator over the segments of the name
pub fn iter(&self) -> impl Iterator<Item = Tok<String>> + '_ { self.0.iter().cloned() }
}
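
VName encodes its non-emptiness as a constructor-checked invariant: the only way to build one is through a constructor that rejects an empty sequence. A tiny standalone sketch of the same newtype invariant:

/// Sketch of the VName invariant: a vector wrapper that can only be built non-empty.
struct NonEmpty(Vec<String>);

struct EmptyError;

impl NonEmpty {
  fn new(items: impl IntoIterator<Item = String>) -> Result<Self, EmptyError> {
    let v: Vec<_> = items.into_iter().collect();
    if v.is_empty() { Err(EmptyError) } else { Ok(Self(v)) }
  }
}

fn main() {
  assert!(NonEmpty::new(["std".to_string()]).is_ok());
  assert!(NonEmpty::new(std::iter::empty()).is_err());
}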
impl fmt::Debug for VName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
}
impl fmt::Display for VName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::"))
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::"))
}
}
impl IntoIterator for VName {
type Item = Tok<String>;
type IntoIter = vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
type Item = Tok<String>;
type IntoIter = vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
}
impl<T> Index<T> for VName
where PathSlice: Index<T>
{
type Output = <PathSlice as Index<T>>::Output;
type Output = <PathSlice as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
}
impl Borrow<[Tok<String>]> for VName {
fn borrow(&self) -> &[Tok<String>] { self.0.borrow() }
fn borrow(&self) -> &[Tok<String>] { self.0.borrow() }
}
impl Borrow<PathSlice> for VName {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
}
impl Deref for VName {
type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() }
type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() }
}
/// Error produced when a non-empty name [VName] or [Sym] is constructed with an
@@ -322,10 +322,10 @@ impl Deref for VName {
#[derive(Debug, Copy, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct EmptyNameError;
impl TryFrom<&[Tok<String>]> for VName {
type Error = EmptyNameError;
fn try_from(value: &[Tok<String>]) -> Result<Self, Self::Error> {
Self::new(value.iter().cloned())
}
type Error = EmptyNameError;
fn try_from(value: &[Tok<String>]) -> Result<Self, Self::Error> {
Self::new(value.iter().cloned())
}
}
/// An interned representation of a namespaced identifier.
@@ -336,94 +336,94 @@ impl TryFrom<&[Tok<String>]> for VName {
#[derive(Clone, Hash, PartialEq, Eq)]
pub struct Sym(Tok<Vec<Tok<String>>>);
impl Sym {
/// Assert that the sequence isn't empty, intern it and wrap it in a [Sym] to
/// represent this invariant
pub fn new(v: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> {
let items = v.into_iter().collect_vec();
Self::from_tok(intern(&items[..]))
}
/// Read a `::` separated namespaced name.
pub fn parse(s: &str) -> Result<Self, EmptyNameError> {
Ok(Sym(intern(&VName::parse(s)?.into_vec()[..])))
}
/// Assert that a token isn't empty, and wrap it in a [Sym]
pub fn from_tok(t: Tok<Vec<Tok<String>>>) -> Result<Self, EmptyNameError> {
if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) }
}
/// Grab the interner token
pub fn tok(&self) -> Tok<Vec<Tok<String>>> { self.0.clone() }
/// Get a number unique to this name suitable for arbitrary ordering.
pub fn id(&self) -> NonZeroU64 { self.0.to_api().get_id() }
/// Extern the sym for editing
pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) }
pub fn from_api(marker: api::TStrv) -> Sym {
Self::from_tok(Tok::from_api(marker)).expect("Empty sequence found for serialized Sym")
}
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
/// Assert that the sequence isn't empty, intern it and wrap it in a [Sym] to
/// represent this invariant
pub fn new(v: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> {
let items = v.into_iter().collect_vec();
Self::from_tok(intern(&items[..]))
}
/// Read a `::` separated namespaced name.
pub fn parse(s: &str) -> Result<Self, EmptyNameError> {
Ok(Sym(intern(&VName::parse(s)?.into_vec()[..])))
}
/// Assert that a token isn't empty, and wrap it in a [Sym]
pub fn from_tok(t: Tok<Vec<Tok<String>>>) -> Result<Self, EmptyNameError> {
if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) }
}
/// Grab the interner token
pub fn tok(&self) -> Tok<Vec<Tok<String>>> { self.0.clone() }
/// Get a number unique to this name suitable for arbitrary ordering.
pub fn id(&self) -> NonZeroU64 { self.0.to_api().get_id() }
/// Extern the sym for editing
pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) }
pub fn from_api(marker: api::TStrv) -> Sym {
Self::from_tok(Tok::from_api(marker)).expect("Empty sequence found for serialized Sym")
}
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
}
impl fmt::Debug for Sym {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Sym({self})") }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Sym({self})") }
}
impl fmt::Display for Sym {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::"))
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.str_iter().join("::"))
}
}
impl<T> Index<T> for Sym
where PathSlice: Index<T>
{
type Output = <PathSlice as Index<T>>::Output;
type Output = <PathSlice as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
}
impl Borrow<[Tok<String>]> for Sym {
fn borrow(&self) -> &[Tok<String>] { &self.0[..] }
fn borrow(&self) -> &[Tok<String>] { &self.0[..] }
}
impl Borrow<PathSlice> for Sym {
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
fn borrow(&self) -> &PathSlice { PathSlice::new(&self.0[..]) }
}
impl Deref for Sym {
type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() }
type Target = PathSlice;
fn deref(&self) -> &Self::Target { self.borrow() }
}
/// An abstraction over tokenized vs non-tokenized names so that they can be
/// handled together in data structures. The names can never be empty
#[allow(clippy::len_without_is_empty)] // never empty
pub trait NameLike:
'static + Clone + Eq + Hash + fmt::Debug + fmt::Display + Borrow<PathSlice>
'static + Clone + Eq + Hash + fmt::Debug + fmt::Display + Borrow<PathSlice>
{
/// Convert into held slice
fn as_slice(&self) -> &[Tok<String>] { Borrow::<PathSlice>::borrow(self) }
/// Get iterator over tokens
fn iter(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
/// Get iterator over string segments
fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ {
self.as_slice().iter().map(|t| t.as_str())
}
/// Fully resolve the name for printing
#[must_use]
fn to_strv(&self) -> Vec<String> { self.iter().map(|s| s.to_string()).collect() }
/// Format the name as an approximate filename
fn as_src_path(&self) -> String { format!("{}.orc", self.iter().join("/")) }
/// Return the number of segments in the name
fn len(&self) -> NonZeroUsize {
NonZeroUsize::try_from(self.iter().count()).expect("NameLike never empty")
}
/// Like slice's `split_first` except we know that it always returns Some
fn split_first(&self) -> (Tok<String>, &PathSlice) {
let (head, tail) = self.as_slice().split_first().expect("NameLike never empty");
(head.clone(), PathSlice::new(tail))
}
/// Like slice's `split_last` except we know that it always returns Some
fn split_last(&self) -> (Tok<String>, &PathSlice) {
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
(foot.clone(), PathSlice::new(torso))
}
/// Get the first element
fn first(&self) -> Tok<String> { self.split_first().0 }
/// Get the last element
fn last(&self) -> Tok<String> { self.split_last().0 }
/// Convert into held slice
fn as_slice(&self) -> &[Tok<String>] { Borrow::<PathSlice>::borrow(self) }
/// Get iterator over tokens
fn iter(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
/// Get iterator over string segments
fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ {
self.as_slice().iter().map(|t| t.as_str())
}
/// Fully resolve the name for printing
#[must_use]
fn to_strv(&self) -> Vec<String> { self.iter().map(|s| s.to_string()).collect() }
/// Format the name as an approximate filename
fn as_src_path(&self) -> String { format!("{}.orc", self.iter().join("/")) }
/// Return the number of segments in the name
fn len(&self) -> NonZeroUsize {
NonZeroUsize::try_from(self.iter().count()).expect("NameLike never empty")
}
/// Like slice's `split_first` except we know that it always returns Some
fn split_first(&self) -> (Tok<String>, &PathSlice) {
let (head, tail) = self.as_slice().split_first().expect("NameLike never empty");
(head.clone(), PathSlice::new(tail))
}
/// Like slice's `split_last` except we know that it always returns Some
fn split_last(&self) -> (Tok<String>, &PathSlice) {
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
(foot.clone(), PathSlice::new(torso))
}
/// Get the first element
fn first(&self) -> Tok<String> { self.split_first().0 }
/// Get the last element
fn last(&self) -> Tok<String> { self.split_last().0 }
}
impl NameLike for Sym {}
@@ -492,35 +492,35 @@ macro_rules! path_slice {
#[cfg(test)]
mod test {
use std::borrow::Borrow;
use std::borrow::Borrow;
use super::{PathSlice, Sym, VName};
use crate::interner::{intern, Tok};
use crate::name::VPath;
use super::{PathSlice, Sym, VName};
use crate::interner::{Tok, intern};
use crate::name::VPath;
#[test]
fn recur() {
let myname = vname!(foo::bar);
let _borrowed_slice: &[Tok<String>] = myname.borrow();
let _borrowed_pathslice: &PathSlice = myname.borrow();
let _deref_pathslice: &PathSlice = &myname;
let _as_slice_out: &[Tok<String>] = myname.as_slice();
}
#[test]
fn recur() {
let myname = vname!(foo::bar);
let _borrowed_slice: &[Tok<String>] = myname.borrow();
let _borrowed_pathslice: &PathSlice = myname.borrow();
let _deref_pathslice: &PathSlice = &myname;
let _as_slice_out: &[Tok<String>] = myname.as_slice();
}
#[test]
fn literals() {
assert_eq!(
sym!(foo::bar::baz),
Sym::new([intern("foo"), intern("bar"), intern("baz")]).unwrap()
);
assert_eq!(
vname!(foo::bar::baz),
VName::new([intern("foo"), intern("bar"), intern("baz")]).unwrap()
);
assert_eq!(vpath!(foo::bar::baz), VPath::new([intern("foo"), intern("bar"), intern("baz")]));
assert_eq!(
path_slice!(foo::bar::baz),
PathSlice::new(&[intern("foo"), intern("bar"), intern("baz")])
);
}
#[test]
fn literals() {
assert_eq!(
sym!(foo::bar::baz),
Sym::new([intern("foo"), intern("bar"), intern("baz")]).unwrap()
);
assert_eq!(
vname!(foo::bar::baz),
VName::new([intern("foo"), intern("bar"), intern("baz")]).unwrap()
);
assert_eq!(vpath!(foo::bar::baz), VPath::new([intern("foo"), intern("bar"), intern("baz")]));
assert_eq!(
path_slice!(foo::bar::baz),
PathSlice::new(&[intern("foo"), intern("bar"), intern("baz")])
);
}
}
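// A minimal usage sketch of the name types above, assuming `VName`, `Sym` and
// `intern` are in scope as in this module; the segments are illustrative.
fn _name_usage_sketch() {
	// `parse` enforces the non-empty invariant at construction time.
	let base = VName::parse("std::number").expect("two segments, not empty");
	// `prefix` and `suffix` are pure: they return a new VName.
	let full = base.suffix([intern("add")]);
	assert_eq!(full.str_iter().collect::<Vec<_>>(), ["std", "number", "add"]);
	// Interning the same segments yields an equal Sym.
	assert_eq!(full.to_sym(), Sym::parse("std::number::add").unwrap());
}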

View File

@@ -1,131 +1,131 @@
use std::num::IntErrorKind;
use std::ops::Range;
use num_traits::ToPrimitive;
use ordered_float::NotNan;
use rust_decimal::Decimal;
use num_traits::ToPrimitive;
use crate::error::{mk_err, OrcErr};
use crate::error::{OrcErr, mk_err};
use crate::intern;
use crate::location::Pos;
/// A number, either floating point or unsigned int, parsed by Orchid.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Numeric {
/// A nonnegative integer
Uint(u64),
/// A binary float other than NaN
Float(NotNan<f64>),
/// A decimal number
Decimal(Decimal),
/// A nonnegative integer
Uint(u64),
/// A binary float other than NaN
Float(NotNan<f64>),
/// A decimal number
Decimal(Decimal),
}
impl Numeric {
pub fn decimal(num: i64, scale: u32) -> Self { Self::Decimal(Decimal::new(num, scale)) }
pub fn float(value: f64) -> Self { Self::Float(NotNan::new(value).unwrap()) }
pub fn to_f64(self) -> NotNan<f64> {
match self {
Self::Float(f) => f,
Self::Decimal(d) => {
let f = d.to_f64().expect("This is apparently always possible");
NotNan::new(f).expect("decimal was nan")
},
Self::Uint(i) => NotNan::new(i as f64).expect("int cannot be NaN"),
}
}
pub fn decimal(num: i64, scale: u32) -> Self { Self::Decimal(Decimal::new(num, scale)) }
pub fn float(value: f64) -> Self { Self::Float(NotNan::new(value).unwrap()) }
pub fn to_f64(self) -> NotNan<f64> {
match self {
Self::Float(f) => f,
Self::Decimal(d) => {
let f = d.to_f64().expect("This is apparently always possible");
NotNan::new(f).expect("decimal was nan")
},
Self::Uint(i) => NotNan::new(i as f64).expect("int cannot be NaN"),
}
}
}
/// Reasons why [parse_num] might fail. See [NumError].
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum NumErrorKind {
/// The literal describes [f64::NAN]
NaN,
/// Some integer appearing in the literal overflows [usize]
Overflow,
/// A character that isn't a digit in the given base was found
InvalidDigit,
/// The literal describes [f64::NAN]
NaN,
/// Some integer appearing in the literal overflows [usize]
Overflow,
/// A character that isn't a digit in the given base was found
InvalidDigit,
}
impl NumErrorKind {
fn from_int(kind: &IntErrorKind) -> Self {
match kind {
IntErrorKind::InvalidDigit => Self::InvalidDigit,
IntErrorKind::NegOverflow | IntErrorKind::PosOverflow => Self::Overflow,
_ => panic!("Impossible error condition"),
}
}
fn from_int(kind: &IntErrorKind) -> Self {
match kind {
IntErrorKind::InvalidDigit => Self::InvalidDigit,
IntErrorKind::NegOverflow | IntErrorKind::PosOverflow => Self::Overflow,
_ => panic!("Impossible error condition"),
}
}
}
/// Error produced by [parse_num]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NumError {
/// Location
pub range: Range<usize>,
/// Reason
pub kind: NumErrorKind,
/// Location
pub range: Range<usize>,
/// Reason
pub kind: NumErrorKind,
}
pub fn num_to_err(NumError { kind, range }: NumError, offset: u32) -> OrcErr {
mk_err(
intern!(str: "Failed to parse number"),
match kind {
NumErrorKind::NaN => "NaN emerged during parsing",
NumErrorKind::InvalidDigit => "non-digit character encountered",
NumErrorKind::Overflow => "The number being described is too large or too accurate",
},
[Pos::Range(offset + range.start as u32..offset + range.end as u32).into()],
)
mk_err(
intern!(str: "Failed to parse number"),
match kind {
NumErrorKind::NaN => "NaN emerged during parsing",
NumErrorKind::InvalidDigit => "non-digit character encountered",
NumErrorKind::Overflow => "The number being described is too large or too accurate",
},
[Pos::Range(offset + range.start as u32..offset + range.end as u32).into()],
)
}
/// Parse a number literal out of text
pub fn parse_num(string: &str) -> Result<Numeric, NumError> {
let overflow_err = NumError { range: 0..string.len(), kind: NumErrorKind::Overflow };
let (radix, noprefix, pos) = (string.strip_prefix("0x").map(|s| (16u8, s, 2)))
.or_else(|| string.strip_prefix("0b").map(|s| (2u8, s, 2)))
.or_else(|| string.strip_prefix("0o").map(|s| (8u8, s, 2)))
.unwrap_or((10u8, string, 0));
// identity
let (base, exponent) = match noprefix.split_once('p') {
Some((b, e)) => {
let (s, d, len) = e.strip_prefix('-').map_or((1, e, 0), |ue| (-1, ue, 1));
(b, s * int_parse(d, 10, pos + b.len() + 1 + len)? as i32)
},
None => (noprefix, 0),
};
match base.split_once('.') {
None => {
let base_usize = int_parse(base, radix, pos)?;
if let Ok(pos_exp) = u32::try_from(exponent) {
if let Some(radical) = u64::from(radix).checked_pow(pos_exp) {
let number = base_usize.checked_mul(radical).ok_or(overflow_err)?;
return Ok(Numeric::Uint(number));
}
}
let f = (base_usize as f64) * (radix as f64).powi(exponent);
let err = NumError { range: 0..string.len(), kind: NumErrorKind::NaN };
Ok(Numeric::Float(NotNan::new(f).map_err(|_| err)?))
},
Some((whole, part)) => {
let whole_n = int_parse(whole, radix, pos)?;
let part_n = int_parse(part, radix, pos + whole.len() + 1)?;
let scale = part.chars().filter(|c| *c != '_').count() as u32;
if radix == 10 {
let mut scaled_unit = Decimal::ONE;
(scaled_unit.set_scale(scale))
.map_err(|_| NumError { range: 0..string.len(), kind: NumErrorKind::Overflow })?;
Ok(Numeric::Decimal(Decimal::from(whole_n) + scaled_unit * Decimal::from(part_n)))
} else {
let real_val = whole_n as f64 + (part_n as f64 / (radix as f64).powi(scale as i32));
let f = real_val * (radix as f64).powi(exponent);
Ok(Numeric::Float(NotNan::new(f).expect("None of the inputs are NaN")))
}
},
}
let overflow_err = NumError { range: 0..string.len(), kind: NumErrorKind::Overflow };
let (radix, noprefix, pos) = (string.strip_prefix("0x").map(|s| (16u8, s, 2)))
.or_else(|| string.strip_prefix("0b").map(|s| (2u8, s, 2)))
.or_else(|| string.strip_prefix("0o").map(|s| (8u8, s, 2)))
.unwrap_or((10u8, string, 0));
// identity
let (base, exponent) = match noprefix.split_once('p') {
Some((b, e)) => {
let (s, d, len) = e.strip_prefix('-').map_or((1, e, 0), |ue| (-1, ue, 1));
(b, s * int_parse(d, 10, pos + b.len() + 1 + len)? as i32)
},
None => (noprefix, 0),
};
match base.split_once('.') {
None => {
let base_usize = int_parse(base, radix, pos)?;
if let Ok(pos_exp) = u32::try_from(exponent) {
if let Some(radical) = u64::from(radix).checked_pow(pos_exp) {
let number = base_usize.checked_mul(radical).ok_or(overflow_err)?;
return Ok(Numeric::Uint(number));
}
}
let f = (base_usize as f64) * (radix as f64).powi(exponent);
let err = NumError { range: 0..string.len(), kind: NumErrorKind::NaN };
Ok(Numeric::Float(NotNan::new(f).map_err(|_| err)?))
},
Some((whole, part)) => {
let whole_n = int_parse(whole, radix, pos)?;
let part_n = int_parse(part, radix, pos + whole.len() + 1)?;
let scale = part.chars().filter(|c| *c != '_').count() as u32;
if radix == 10 {
let mut scaled_unit = Decimal::ONE;
(scaled_unit.set_scale(scale))
.map_err(|_| NumError { range: 0..string.len(), kind: NumErrorKind::Overflow })?;
Ok(Numeric::Decimal(Decimal::from(whole_n) + scaled_unit * Decimal::from(part_n)))
} else {
let real_val = whole_n as f64 + (part_n as f64 / (radix as f64).powi(scale as i32));
let f = real_val * (radix as f64).powi(exponent);
Ok(Numeric::Float(NotNan::new(f).expect("None of the inputs are NaN")))
}
},
}
}
fn int_parse(s: &str, radix: u8, start: usize) -> Result<u64, NumError> {
let s = s.chars().filter(|c| *c != '_').collect::<String>();
let range = start..(start + s.len());
u64::from_str_radix(&s, radix as u32)
.map_err(|e| NumError { range, kind: NumErrorKind::from_int(e.kind()) })
let s = s.chars().filter(|c| *c != '_').collect::<String>();
let range = start..(start + s.len());
u64::from_str_radix(&s, radix as u32)
.map_err(|e| NumError { range, kind: NumErrorKind::from_int(e.kind()) })
}
/// Filter for characters that can appear in numbers
@@ -136,42 +136,42 @@ pub fn numstart(c: char) -> bool { c.is_ascii_digit() }
/// Print a number as a base-16 floating point literal
#[must_use]
pub fn print_nat16(num: NotNan<f64>) -> String {
if *num == 0.0 {
return "0x0".to_string();
} else if num.is_infinite() {
return match num.is_sign_positive() {
true => "Infinity".to_string(),
false => "-Infinity".to_string(),
};
} else if num.is_nan() {
return "NaN".to_string();
}
let exp = num.log(16.0).floor();
let man = *num / 16_f64.powf(exp);
format!("0x{man}p{exp:.0}")
if *num == 0.0 {
return "0x0".to_string();
} else if num.is_infinite() {
return match num.is_sign_positive() {
true => "Infinity".to_string(),
false => "-Infinity".to_string(),
};
} else if num.is_nan() {
return "NaN".to_string();
}
let exp = num.log(16.0).floor();
let man = *num / 16_f64.powf(exp);
format!("0x{man}p{exp:.0}")
}
#[cfg(test)]
mod test {
use super::{parse_num, Numeric};
use super::{Numeric, parse_num};
#[test]
fn just_ints() {
let test = |s, n| assert_eq!(parse_num(s), Ok(Numeric::Uint(n)));
test("12345", 12345);
test("0xcafebabe", 0xcafebabe);
test("0o751", 0o751);
test("0b111000111", 0b111000111);
}
#[test]
fn just_ints() {
let test = |s, n| assert_eq!(parse_num(s), Ok(Numeric::Uint(n)));
test("12345", 12345);
test("0xcafebabe", 0xcafebabe);
test("0o751", 0o751);
test("0b111000111", 0b111000111);
}
#[test]
fn decimals() {
let test = |s, n| assert_eq!(parse_num(s), Ok(n));
test("3.1417", Numeric::decimal(31417, 4));
test("0xf.cafe", Numeric::float(0xf as f64 + 0xcafe as f64 / 0x10000 as f64));
test("34p3", Numeric::Uint(34000));
test("0x2p3", Numeric::Uint(0x2 * 0x1000));
test("1.5p3", Numeric::decimal(1500, 0));
test("0x2.5p3", Numeric::float((0x25 * 0x100) as f64));
}
#[test]
fn decimals() {
let test = |s, n| assert_eq!(parse_num(s), Ok(n));
test("3.1417", Numeric::decimal(31417, 4));
test("0xf.cafe", Numeric::float(0xf as f64 + 0xcafe as f64 / 0x10000 as f64));
test("34p3", Numeric::Uint(34000));
test("0x2p3", Numeric::Uint(0x2 * 0x1000));
test("1.5p3", Numeric::decimal(1500, 0));
test("0x2.5p3", Numeric::float((0x25 * 0x100) as f64));
}
}
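// A minimal sketch of the literal forms handled above, assuming `parse_num`,
// `Numeric` and `NumErrorKind` are in scope; the inputs are illustrative.
fn _numeric_usage_sketch() {
	// `p` scales by the radix of the literal: (2 + 5/16) * 16^3 = 0x2500.
	assert_eq!(parse_num("0x2.5p3"), Ok(Numeric::float(0x2500 as f64)));
	// Underscores are ignored and base-10 fractions keep their exact scale.
	assert_eq!(parse_num("1_000.25"), Ok(Numeric::decimal(100025, 2)));
	// Digits outside the radix are reported with their position and kind.
	assert_eq!(parse_num("0b12").unwrap_err().kind, NumErrorKind::InvalidDigit);
}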

View File

@@ -3,8 +3,8 @@ use std::ops::{Deref, Range};
use itertools::Itertools;
use crate::error::{mk_err, mk_errv, OrcRes, Reporter};
use crate::interner::{intern, Tok};
use crate::error::{OrcRes, Reporter, mk_err, mk_errv};
use crate::interner::{Tok, intern};
use crate::location::Pos;
use crate::name::VPath;
use crate::tree::{AtomRepr, ExtraTok, Paren, TokTree, Token};
@@ -17,297 +17,299 @@ pub fn unrep_space(c: char) -> bool { c.is_whitespace() && !"\r\n".contains(c) }
#[derive(Debug)]
pub struct Snippet<'a, 'b, A: AtomRepr, X: ExtraTok> {
prev: &'a TokTree<'b, A, X>,
cur: &'a [TokTree<'b, A, X>],
prev: &'a TokTree<'b, A, X>,
cur: &'a [TokTree<'b, A, X>],
}
impl<'a, 'b, A: AtomRepr, X: ExtraTok> Snippet<'a, 'b, A, X> {
pub fn new(prev: &'a TokTree<'b, A, X>, cur: &'a [TokTree<'b, A, X>]) -> Self {
Self { prev, cur }
}
pub fn split_at(self, pos: u32) -> (Self, Self) {
let fst = Self { prev: self.prev, cur: &self.cur[..pos as usize] };
let new_prev = if pos == 0 { self.prev } else { &self.cur[pos as usize - 1] };
let snd = Self { prev: new_prev, cur: &self.cur[pos as usize..] };
(fst, snd)
}
pub fn find_idx(self, mut f: impl FnMut(&Token<'b, A, X>) -> bool) -> Option<u32> {
self.cur.iter().position(|t| f(&t.tok)).map(|t| t as u32)
}
pub fn get(self, idx: u32) -> Option<&'a TokTree<'b, A, X>> { self.cur.get(idx as usize) }
pub fn len(self) -> u32 { self.cur.len() as u32 }
pub fn prev(self) -> &'a TokTree<'b, A, X> { self.prev }
pub fn pos(self) -> Range<u32> {
(self.cur.first().map(|f| f.range.start..self.cur.last().unwrap().range.end))
.unwrap_or(self.prev.range.clone())
}
pub fn pop_front(self) -> Option<(&'a TokTree<'b, A, X>, Self)> {
self.cur.first().map(|r| (r, self.split_at(1).1))
}
pub fn pop_back(self) -> Option<(Self, &'a TokTree<'b, A, X>)> {
self.cur.last().map(|r| (self.split_at(self.len() - 1).0, r))
}
pub fn split_once(self, f: impl FnMut(&Token<'b, A, X>) -> bool) -> Option<(Self, Self)> {
let idx = self.find_idx(f)?;
Some((self.split_at(idx).0, self.split_at(idx + 1).1))
}
pub fn split(
mut self,
mut f: impl FnMut(&Token<'b, A, X>) -> bool,
) -> impl Iterator<Item = Self> {
iter::from_fn(move || {
self.is_empty().then_some(())?;
let (ret, next) = self.split_once(&mut f).unwrap_or(self.split_at(self.len()));
self = next;
Some(ret)
})
}
pub fn is_empty(self) -> bool { self.len() == 0 }
pub fn skip_fluff(self) -> Self {
let non_fluff_start = self.find_idx(|t| !matches!(t, Token::NS | Token::Comment(_)));
self.split_at(non_fluff_start.unwrap_or(self.len())).1
}
pub fn new(prev: &'a TokTree<'b, A, X>, cur: &'a [TokTree<'b, A, X>]) -> Self {
Self { prev, cur }
}
pub fn split_at(self, pos: u32) -> (Self, Self) {
let fst = Self { prev: self.prev, cur: &self.cur[..pos as usize] };
let new_prev = if pos == 0 { self.prev } else { &self.cur[pos as usize - 1] };
let snd = Self { prev: new_prev, cur: &self.cur[pos as usize..] };
(fst, snd)
}
pub fn find_idx(self, mut f: impl FnMut(&Token<'b, A, X>) -> bool) -> Option<u32> {
self.cur.iter().position(|t| f(&t.tok)).map(|t| t as u32)
}
pub fn get(self, idx: u32) -> Option<&'a TokTree<'b, A, X>> { self.cur.get(idx as usize) }
pub fn len(self) -> u32 { self.cur.len() as u32 }
pub fn prev(self) -> &'a TokTree<'b, A, X> { self.prev }
pub fn pos(self) -> Range<u32> {
(self.cur.first().map(|f| f.range.start..self.cur.last().unwrap().range.end))
.unwrap_or(self.prev.range.clone())
}
pub fn pop_front(self) -> Option<(&'a TokTree<'b, A, X>, Self)> {
self.cur.first().map(|r| (r, self.split_at(1).1))
}
pub fn pop_back(self) -> Option<(Self, &'a TokTree<'b, A, X>)> {
self.cur.last().map(|r| (self.split_at(self.len() - 1).0, r))
}
pub fn split_once(self, f: impl FnMut(&Token<'b, A, X>) -> bool) -> Option<(Self, Self)> {
let idx = self.find_idx(f)?;
Some((self.split_at(idx).0, self.split_at(idx + 1).1))
}
pub fn split(
mut self,
mut f: impl FnMut(&Token<'b, A, X>) -> bool,
) -> impl Iterator<Item = Self> {
iter::from_fn(move || {
self.is_empty().then_some(())?;
let (ret, next) = self.split_once(&mut f).unwrap_or(self.split_at(self.len()));
self = next;
Some(ret)
})
}
pub fn is_empty(self) -> bool { self.len() == 0 }
pub fn skip_fluff(self) -> Self {
let non_fluff_start = self.find_idx(|t| !matches!(t, Token::NS | Token::Comment(_)));
self.split_at(non_fluff_start.unwrap_or(self.len())).1
}
}
impl<A: AtomRepr, X: ExtraTok> Copy for Snippet<'_, '_, A, X> {}
impl<A: AtomRepr, X: ExtraTok> Clone for Snippet<'_, '_, A, X> {
fn clone(&self) -> Self { *self }
fn clone(&self) -> Self { *self }
}
impl<'b, A: AtomRepr, X: ExtraTok> Deref for Snippet<'_, 'b, A, X> {
type Target = [TokTree<'b, A, X>];
fn deref(&self) -> &Self::Target { self.cur }
type Target = [TokTree<'b, A, X>];
fn deref(&self) -> &Self::Target { self.cur }
}
/// Remove tokens that aren't meaningful in expression context, such as comments
/// or line breaks
pub fn strip_fluff<'a, A: AtomRepr, X: ExtraTok>(
tt: &TokTree<'a, A, X>,
tt: &TokTree<'a, A, X>,
) -> Option<TokTree<'a, A, X>> {
let tok = match &tt.tok {
Token::BR => return None,
Token::Comment(_) => return None,
Token::LambdaHead(arg) => Token::LambdaHead(arg.iter().filter_map(strip_fluff).collect()),
Token::S(p, b) => Token::S(*p, b.iter().filter_map(strip_fluff).collect()),
t => t.clone(),
};
Some(TokTree { tok, range: tt.range.clone() })
let tok = match &tt.tok {
Token::BR => return None,
Token::Comment(_) => return None,
Token::LambdaHead(arg) => Token::LambdaHead(arg.iter().filter_map(strip_fluff).collect()),
Token::S(p, b) => Token::S(*p, b.iter().filter_map(strip_fluff).collect()),
t => t.clone(),
};
Some(TokTree { tok, range: tt.range.clone() })
}
#[derive(Clone, Debug)]
pub struct Comment {
pub text: Tok<String>,
pub pos: Pos,
pub text: Tok<String>,
pub pos: Pos,
}
impl Comment {
pub fn to_api(&self) -> api::Comment {
api::Comment { location: self.pos.to_api(), text: self.text.to_api() }
}
pub fn from_api(api: &api::Comment) -> Self {
Self { pos: Pos::from_api(&api.location), text: Tok::from_api(api.text) }
}
pub fn to_api(&self) -> api::Comment {
api::Comment { location: self.pos.to_api(), text: self.text.to_api() }
}
pub fn from_api(api: &api::Comment) -> Self {
Self { pos: Pos::from_api(&api.location), text: Tok::from_api(api.text) }
}
}
pub fn line_items<'a, 'b, A: AtomRepr, X: ExtraTok>(
snip: Snippet<'a, 'b, A, X>,
snip: Snippet<'a, 'b, A, X>,
) -> Vec<Parsed<'a, 'b, Vec<Comment>, A, X>> {
let mut items = Vec::new();
let mut comments = Vec::new();
for mut line in snip.split(|t| matches!(t, Token::BR)) {
match &line.cur {
[TokTree { tok: Token::S(Paren::Round, tokens), .. }] => line.cur = tokens,
[] => continue,
_ => (),
}
match line.find_idx(|t| !matches!(t, Token::Comment(_))) {
None => comments.extend(line.cur),
Some(i) => {
let (cmts, tail) = line.split_at(i);
let comments = Vec::from_iter(comments.drain(..).chain(cmts.cur).map(|t| match &t.tok {
Token::Comment(c) => Comment { text: intern(&**c), pos: Pos::Range(t.range.clone()) },
_ => unreachable!("All are comments checked above"),
}));
items.push(Parsed { output: comments, tail });
},
}
}
items
let mut items = Vec::new();
let mut comments = Vec::new();
for mut line in snip.split(|t| matches!(t, Token::BR)) {
match &line.cur {
[TokTree { tok: Token::S(Paren::Round, tokens), .. }] => line.cur = tokens,
[] => continue,
_ => (),
}
match line.find_idx(|t| !matches!(t, Token::Comment(_))) {
None => comments.extend(line.cur),
Some(i) => {
let (cmts, tail) = line.split_at(i);
let comments = Vec::from_iter(comments.drain(..).chain(cmts.cur).map(|t| match &t.tok {
Token::Comment(c) => Comment { text: intern(&**c), pos: Pos::Range(t.range.clone()) },
_ => unreachable!("All are comments checked above"),
}));
items.push(Parsed { output: comments, tail });
},
}
}
items
}
pub fn try_pop_no_fluff<'a, 'b, A: AtomRepr, X: ExtraTok>(
snip: Snippet<'a, 'b, A, X>,
snip: Snippet<'a, 'b, A, X>,
) -> ParseRes<'a, 'b, &'a TokTree<'b, A, X>, A, X> {
snip.skip_fluff().pop_front().map(|(output, tail)| Parsed { output, tail }).ok_or_else(|| {
mk_errv(
intern!(str: "Unexpected end"),
"Pattern ends abruptly",
[Pos::Range(snip.pos()).into()],
)
})
snip.skip_fluff().pop_front().map(|(output, tail)| Parsed { output, tail }).ok_or_else(|| {
mk_errv(
intern!(str: "Unexpected end"),
"Pattern ends abruptly",
[Pos::Range(snip.pos()).into()],
)
})
}
pub fn expect_end(snip: Snippet<'_, '_, impl AtomRepr, impl ExtraTok>) -> OrcRes<()> {
match snip.skip_fluff().get(0) {
Some(surplus) => Err(mk_errv(
intern!(str: "Extra code after end of line"),
"Code found after the end of the line",
[Pos::Range(surplus.range.clone()).into()],
)),
None => Ok(()),
}
match snip.skip_fluff().get(0) {
Some(surplus) => Err(mk_errv(
intern!(str: "Extra code after end of line"),
"Code found after the end of the line",
[Pos::Range(surplus.range.clone()).into()],
)),
None => Ok(()),
}
}
pub fn expect_tok<'a, 'b, A: AtomRepr, X: ExtraTok>(
snip: Snippet<'a, 'b, A, X>,
tok: Tok<String>,
snip: Snippet<'a, 'b, A, X>,
tok: Tok<String>,
) -> ParseRes<'a, 'b, (), A, X> {
let Parsed { output: head, tail } = try_pop_no_fluff(snip)?;
match &head.tok {
Token::Name(n) if *n == tok => Ok(Parsed { output: (), tail }),
t => Err(mk_errv(
intern!(str: "Expected specific keyword"),
format!("Expected {tok} but found {t}"),
[Pos::Range(head.range.clone()).into()],
)),
}
let Parsed { output: head, tail } = try_pop_no_fluff(snip)?;
match &head.tok {
Token::Name(n) if *n == tok => Ok(Parsed { output: (), tail }),
t => Err(mk_errv(
intern!(str: "Expected specific keyword"),
format!("Expected {tok} but found {t}"),
[Pos::Range(head.range.clone()).into()],
)),
}
}
pub struct Parsed<'a, 'b, T, A: AtomRepr, X: ExtraTok> {
pub output: T,
pub tail: Snippet<'a, 'b, A, X>,
pub output: T,
pub tail: Snippet<'a, 'b, A, X>,
}
pub type ParseRes<'a, 'b, T, A, X> = OrcRes<Parsed<'a, 'b, T, A, X>>;
pub fn parse_multiname<'a, 'b, A: AtomRepr, X: ExtraTok>(
ctx: &impl Reporter,
tail: Snippet<'a, 'b, A, X>,
ctx: &impl Reporter,
tail: Snippet<'a, 'b, A, X>,
) -> ParseRes<'a, 'b, Vec<(Import, Pos)>, A, X> {
let ret = rec(ctx, tail);
#[allow(clippy::type_complexity)] // it's an internal function
pub fn rec<'a, 'b, A: AtomRepr, X: ExtraTok>(
ctx: &impl Reporter,
tail: Snippet<'a, 'b, A, X>,
) -> ParseRes<'a, 'b, Vec<(Vec<Tok<String>>, Option<Tok<String>>, Pos)>, A, X> {
let comma = intern!(str: ",");
let globstar = intern!(str: "*");
let (name, tail) = tail.skip_fluff().pop_front().ok_or_else(|| {
mk_err(intern!(str: "Expected name"), "Expected a name, a list of names, or a globstar.", [
Pos::Range(tail.pos()).into(),
])
})?;
if let Some((Token::NS, tail)) = tail.skip_fluff().pop_front().map(|(tt, s)| (&tt.tok, s)) {
let n = match &name.tok {
Token::Name(n) if n.starts_with(name_start) => Ok(n),
_ => Err(mk_err(intern!(str: "Unexpected name prefix"), "Only names can precede ::", [
Pos::Range(name.range.clone()).into(),
])),
};
match (rec(ctx, tail), n) {
(Err(ev), n) => Err(ev.extended(n.err())),
(Ok(Parsed { tail, .. }), Err(e)) => {
ctx.report(e);
Ok(Parsed { output: vec![], tail })
},
(Ok(Parsed { tail, output }), Ok(pre)) => Ok(Parsed {
output: output.into_iter().update(|i| i.0.push(pre.clone())).collect_vec(),
tail,
}),
}
} else {
let output = match &name.tok {
Token::Name(ntok) => {
let nopt = match ntok {
n if *n == globstar => None,
n if n.starts_with(op_char) =>
return Err(mk_errv(
intern!(str: "Unescaped operator in multiname"),
"Operators in multinames should be enclosed in []",
[Pos::Range(name.range.clone()).into()],
)),
n => Some(n.clone()),
};
vec![(vec![], nopt, Pos::Range(name.range.clone()))]
},
Token::S(Paren::Square, b) => {
let mut ok = Vec::new();
b.iter().for_each(|tt| match &tt.tok {
Token::Name(n) if n.starts_with(op_char) =>
ok.push((vec![], Some(n.clone()), Pos::Range(tt.range.clone()))),
Token::BR | Token::Comment(_) => (),
_ => ctx.report(mk_err(
intern!(str: "Non-operator in escapement in multiname"),
"In multinames, [] functions as a literal name list reserved for operators",
[Pos::Range(name.range.clone()).into()],
)),
});
ok
},
Token::S(Paren::Round, b) => {
let mut ok = Vec::new();
let body = Snippet::new(name, b);
for csent in body.split(|n| matches!(n, Token::Name(n) if *n == comma)) {
match rec(ctx, csent) {
Err(e) => ctx.report(e),
Ok(Parsed { output, tail }) => match tail.get(0) {
None => ok.extend(output),
Some(t) => ctx.report(mk_err(
intern!(str: "Unexpected token in multiname group"),
"Unexpected token. Likely missing a :: or , or wanted [] instead of ()",
[Pos::Range(t.range.clone()).into()],
)),
},
}
}
ok
},
t =>
return Err(mk_errv(
intern!(str: "Unrecognized name end"),
format!("Names cannot end with {t} tokens"),
[Pos::Range(name.range.clone()).into()],
)),
};
Ok(Parsed { output, tail })
}
}
ret.map(|Parsed { output, tail }| {
let output = (output.into_iter())
.map(|(p, name, pos)| (Import { path: VPath::new(p.into_iter().rev()), name }, pos))
.collect_vec();
Parsed { output, tail }
})
let ret = rec(ctx, tail);
#[allow(clippy::type_complexity)] // it's an internal function
pub fn rec<'a, 'b, A: AtomRepr, X: ExtraTok>(
ctx: &impl Reporter,
tail: Snippet<'a, 'b, A, X>,
) -> ParseRes<'a, 'b, Vec<(Vec<Tok<String>>, Option<Tok<String>>, Pos)>, A, X> {
let comma = intern!(str: ",");
let globstar = intern!(str: "*");
let (name, tail) = tail.skip_fluff().pop_front().ok_or_else(|| {
mk_err(intern!(str: "Expected name"), "Expected a name, a list of names, or a globstar.", [
Pos::Range(tail.pos()).into(),
])
})?;
if let Some((Token::NS, tail)) = tail.skip_fluff().pop_front().map(|(tt, s)| (&tt.tok, s)) {
let n = match &name.tok {
Token::Name(n) if n.starts_with(name_start) => Ok(n),
_ => Err(mk_err(intern!(str: "Unexpected name prefix"), "Only names can precede ::", [
Pos::Range(name.range.clone()).into(),
])),
};
match (rec(ctx, tail), n) {
(Err(ev), n) => Err(ev.extended(n.err())),
(Ok(Parsed { tail, .. }), Err(e)) => {
ctx.report(e);
Ok(Parsed { output: vec![], tail })
},
(Ok(Parsed { tail, output }), Ok(pre)) => Ok(Parsed {
output: output.into_iter().update(|i| i.0.push(pre.clone())).collect_vec(),
tail,
}),
}
} else {
let output = match &name.tok {
Token::Name(ntok) => {
let nopt = match ntok {
n if *n == globstar => None,
n if n.starts_with(op_char) => {
return Err(mk_errv(
intern!(str: "Unescaped operator in multiname"),
"Operators in multinames should be enclosed in []",
[Pos::Range(name.range.clone()).into()],
));
},
n => Some(n.clone()),
};
vec![(vec![], nopt, Pos::Range(name.range.clone()))]
},
Token::S(Paren::Square, b) => {
let mut ok = Vec::new();
b.iter().for_each(|tt| match &tt.tok {
Token::Name(n) if n.starts_with(op_char) =>
ok.push((vec![], Some(n.clone()), Pos::Range(tt.range.clone()))),
Token::BR | Token::Comment(_) => (),
_ => ctx.report(mk_err(
intern!(str: "Non-operator in escapement in multiname"),
"In multinames, [] functions as a literal name list reserved for operators",
[Pos::Range(name.range.clone()).into()],
)),
});
ok
},
Token::S(Paren::Round, b) => {
let mut ok = Vec::new();
let body = Snippet::new(name, b);
for csent in body.split(|n| matches!(n, Token::Name(n) if *n == comma)) {
match rec(ctx, csent) {
Err(e) => ctx.report(e),
Ok(Parsed { output, tail }) => match tail.get(0) {
None => ok.extend(output),
Some(t) => ctx.report(mk_err(
intern!(str: "Unexpected token in multiname group"),
"Unexpected token. Likely missing a :: or , or wanted [] instead of ()",
[Pos::Range(t.range.clone()).into()],
)),
},
}
}
ok
},
t => {
return Err(mk_errv(
intern!(str: "Unrecognized name end"),
format!("Names cannot end with {t} tokens"),
[Pos::Range(name.range.clone()).into()],
));
},
};
Ok(Parsed { output, tail })
}
}
ret.map(|Parsed { output, tail }| {
let output = (output.into_iter())
.map(|(p, name, pos)| (Import { path: VPath::new(p.into_iter().rev()), name }, pos))
.collect_vec();
Parsed { output, tail }
})
}
/// A compound name, possibly ending with a globstar
#[derive(Debug, Clone)]
pub struct Import {
pub path: VPath,
pub name: Option<Tok<String>>,
pub path: VPath,
pub name: Option<Tok<String>>,
}
impl Import {
// pub fn from_api(i: api::CompName) -> Self {
// Self { path: VPath::new(i.path.into_iter().map(deintern)), name: i.name.map(deintern) }
// }
// pub fn to_api(&self) -> api::CompName {
// api::CompName {
// path: self.path.iter().map(|t| t.marker()).collect(),
// name: self.name.as_ref().map(|t| t.marker()),
// }
// }
// pub fn from_api(i: api::CompName) -> Self {
// Self { path: VPath::new(i.path.into_iter().map(deintern)), name:
// i.name.map(deintern) } }
// pub fn to_api(&self) -> api::CompName {
// api::CompName {
// path: self.path.iter().map(|t| t.marker()).collect(),
// name: self.name.as_ref().map(|t| t.marker()),
// }
// }
}
#[cfg(test)]
mod test {
use never::Never;
use never::Never;
use super::Snippet;
use super::Snippet;
fn _covary_snip_a<'a, 'b>(
x: Snippet<'static, 'b, Never, Never>,
) -> Snippet<'a, 'b, Never, Never> {
x
}
fn _covary_snip_b<'a, 'b>(
x: Snippet<'a, 'static, Never, Never>,
) -> Snippet<'a, 'b, Never, Never> {
x
}
fn _covary_snip_a<'a, 'b>(
x: Snippet<'static, 'b, Never, Never>,
) -> Snippet<'a, 'b, Never, Never> {
x
}
fn _covary_snip_b<'a, 'b>(
x: Snippet<'a, 'static, Never, Never>,
) -> Snippet<'a, 'b, Never, Never> {
x
}
}
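// A minimal sketch of how the helpers above compose, assuming the imports at
// the top of this file; the `export` keyword and error text are illustrative.
fn _parse_line_sketch<'a, 'b, A: AtomRepr, X: ExtraTok>(
	snip: Snippet<'a, 'b, A, X>,
) -> OrcRes<Tok<String>> {
	// Consume the leading keyword, skipping comments and line breaks.
	let Parsed { tail, .. } = expect_tok(snip, intern!(str: "export"))?;
	// Take the next meaningful token and require it to be a name.
	let Parsed { output: head, tail } = try_pop_no_fluff(tail)?;
	let name = match &head.tok {
		Token::Name(n) => n.clone(),
		t => return Err(mk_errv(
			intern!(str: "Expected a name"),
			format!("Expected a name but found {t}"),
			[Pos::Range(head.range.clone()).into()],
		)),
	};
	// Reject any trailing tokens on the line.
	expect_end(tail)?;
	Ok(name)
}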

View File

@@ -7,7 +7,7 @@ use std::iter;
/// Create a new vector consisting of the provided vector with the
/// element appended. See [pushed_ref] to use it with a slice
pub fn pushed<I: IntoIterator, C: FromIterator<I::Item>>(vec: I, t: I::Item) -> C {
vec.into_iter().chain(iter::once(t)).collect()
vec.into_iter().chain(iter::once(t)).collect()
}
/// Pure version of [Vec::push]
@@ -15,21 +15,21 @@ pub fn pushed<I: IntoIterator, C: FromIterator<I::Item>>(vec: I, t: I::Item) ->
/// Create a new vector consisting of the provided slice with the
/// element appended. See [pushed] for the owned version
pub fn pushed_ref<'a, T: Clone + 'a, C: FromIterator<T>>(
vec: impl IntoIterator<Item = &'a T>,
t: T,
vec: impl IntoIterator<Item = &'a T>,
t: T,
) -> C {
vec.into_iter().cloned().chain(iter::once(t)).collect()
vec.into_iter().cloned().chain(iter::once(t)).collect()
}
/// Push an element on the adhoc stack, pass it to the callback, then pop the
/// element out again.
pub fn with_pushed<T, U>(
vec: &mut Vec<T>,
item: T,
cb: impl for<'a> FnOnce(&'a mut Vec<T>) -> U,
vec: &mut Vec<T>,
item: T,
cb: impl for<'a> FnOnce(&'a mut Vec<T>) -> U,
) -> (T, U) {
vec.push(item);
let out = cb(vec);
let item = vec.pop().expect("top element stolen by callback");
(item, out)
vec.push(item);
let out = cb(vec);
let item = vec.pop().expect("top element stolen by callback");
(item, out)
}
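// A minimal sketch of the helpers above; the values are illustrative.
fn _pushed_sketch() {
	// `pushed` is the pure counterpart of Vec::push.
	let v: Vec<u32> = pushed(vec![1, 2], 3);
	assert_eq!(v, vec![1, 2, 3]);
	// `with_pushed` pops the element back off after the callback and returns
	// it alongside the callback's result.
	let mut stack = vec![1, 2];
	let (item, sum) = with_pushed(&mut stack, 3, |s| s.iter().sum::<u32>());
	assert_eq!((item, sum), (3, 6));
	assert_eq!(stack, vec![1, 2]);
}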

View File

@@ -3,84 +3,84 @@ use std::cell::RefCell;
use std::marker::PhantomData;
use std::ops::{BitAnd, Deref};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{sync_channel, SyncSender};
use std::sync::mpsc::{SyncSender, sync_channel};
use std::sync::{Arc, Mutex};
use std::{mem, thread};
use derive_destructure::destructure;
use dyn_clone::{clone_box, DynClone};
use dyn_clone::{DynClone, clone_box};
use hashbrown::HashMap;
use orchid_api_traits::{Channel, Coding, Decode, Encode, MsgSet, Request};
use trait_set::trait_set;
pub struct Receipt;
impl Receipt {
pub fn off_thread(name: String, cb: impl FnOnce() -> Self + Send + 'static) -> Self {
thread::Builder::new().name(name).spawn(cb).unwrap();
Self
}
pub fn off_thread(name: String, cb: impl FnOnce() -> Self + Send + 'static) -> Self {
thread::Builder::new().name(name).spawn(cb).unwrap();
Self
}
}
trait_set! {
pub trait SendFn<T: MsgSet> = for<'a> FnMut(&'a [u8], ReqNot<T>) + DynClone + Send + 'static;
pub trait ReqFn<T: MsgSet> =
FnMut(RequestHandle<T>, <T::In as Channel>::Req) -> Receipt + DynClone + Send + Sync + 'static;
pub trait NotifFn<T: MsgSet> =
for<'a> FnMut(<T::In as Channel>::Notif, ReqNot<T>) + DynClone + Send + Sync + 'static;
pub trait SendFn<T: MsgSet> = for<'a> FnMut(&'a [u8], ReqNot<T>) + DynClone + Send + 'static;
pub trait ReqFn<T: MsgSet> =
FnMut(RequestHandle<T>, <T::In as Channel>::Req) -> Receipt + DynClone + Send + Sync + 'static;
pub trait NotifFn<T: MsgSet> =
for<'a> FnMut(<T::In as Channel>::Notif, ReqNot<T>) + DynClone + Send + Sync + 'static;
}
fn get_id(message: &[u8]) -> (u64, &[u8]) {
(u64::from_be_bytes(message[..8].to_vec().try_into().unwrap()), &message[8..])
(u64::from_be_bytes(message[..8].to_vec().try_into().unwrap()), &message[8..])
}
pub trait ReqHandlish {
fn defer_drop(&self, val: impl Any + 'static);
fn defer_drop(&self, val: impl Any + 'static);
}
#[derive(destructure)]
pub struct RequestHandle<MS: MsgSet> {
defer_drop: RefCell<Vec<Box<dyn Any>>>,
fulfilled: AtomicBool,
id: u64,
parent: ReqNot<MS>,
defer_drop: RefCell<Vec<Box<dyn Any>>>,
fulfilled: AtomicBool,
id: u64,
parent: ReqNot<MS>,
}
impl<MS: MsgSet + 'static> RequestHandle<MS> {
fn new(parent: ReqNot<MS>, id: u64) -> Self {
Self { defer_drop: RefCell::default(), fulfilled: false.into(), parent, id }
}
pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() }
pub fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt { self.respond(rep) }
pub fn will_handle_as<U: Request>(&self, _: &U) -> ReqTypToken<U> { ReqTypToken(PhantomData) }
pub fn handle_as<U: Request>(&self, _: ReqTypToken<U>, rep: &U::Response) -> Receipt {
self.respond(rep)
}
pub fn respond(&self, response: &impl Encode) -> Receipt {
assert!(!self.fulfilled.swap(true, Ordering::Relaxed), "Already responded to {}", self.id);
let mut buf = (!self.id).to_be_bytes().to_vec();
response.encode(&mut buf);
let mut send = clone_box(&*self.reqnot().0.lock().unwrap().send);
(send)(&buf, self.parent.clone());
Receipt
}
fn new(parent: ReqNot<MS>, id: u64) -> Self {
Self { defer_drop: RefCell::default(), fulfilled: false.into(), parent, id }
}
pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() }
pub fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt { self.respond(rep) }
pub fn will_handle_as<U: Request>(&self, _: &U) -> ReqTypToken<U> { ReqTypToken(PhantomData) }
pub fn handle_as<U: Request>(&self, _: ReqTypToken<U>, rep: &U::Response) -> Receipt {
self.respond(rep)
}
pub fn respond(&self, response: &impl Encode) -> Receipt {
assert!(!self.fulfilled.swap(true, Ordering::Relaxed), "Already responded to {}", self.id);
let mut buf = (!self.id).to_be_bytes().to_vec();
response.encode(&mut buf);
let mut send = clone_box(&*self.reqnot().0.lock().unwrap().send);
(send)(&buf, self.parent.clone());
Receipt
}
}
impl<MS: MsgSet> ReqHandlish for RequestHandle<MS> {
fn defer_drop(&self, val: impl Any) { self.defer_drop.borrow_mut().push(Box::new(val)) }
fn defer_drop(&self, val: impl Any) { self.defer_drop.borrow_mut().push(Box::new(val)) }
}
impl<MS: MsgSet> Drop for RequestHandle<MS> {
fn drop(&mut self) {
let done = self.fulfilled.load(Ordering::Relaxed);
debug_assert!(done, "Request {} dropped without response", self.id)
}
fn drop(&mut self) {
let done = self.fulfilled.load(Ordering::Relaxed);
debug_assert!(done, "Request {} dropped without response", self.id)
}
}
pub struct ReqTypToken<T>(PhantomData<T>);
pub struct ReqNotData<T: MsgSet> {
id: u64,
send: Box<dyn SendFn<T>>,
notif: Box<dyn NotifFn<T>>,
req: Box<dyn ReqFn<T>>,
responses: HashMap<u64, SyncSender<Vec<u8>>>,
id: u64,
send: Box<dyn SendFn<T>>,
notif: Box<dyn NotifFn<T>>,
req: Box<dyn ReqFn<T>>,
responses: HashMap<u64, SyncSender<Vec<u8>>>,
}
/// Wraps a raw message buffer to save on copying.
@@ -88,180 +88,180 @@ pub struct ReqNotData<T: MsgSet> {
#[derive(Debug, Clone)]
pub struct RawReply(Vec<u8>);
impl Deref for RawReply {
type Target = [u8];
fn deref(&self) -> &Self::Target { get_id(&self.0[..]).1 }
type Target = [u8];
fn deref(&self) -> &Self::Target { get_id(&self.0[..]).1 }
}
pub struct ReqNot<T: MsgSet>(Arc<Mutex<ReqNotData<T>>>);
impl<T: MsgSet> ReqNot<T> {
pub fn new(send: impl SendFn<T>, notif: impl NotifFn<T>, req: impl ReqFn<T>) -> Self {
Self(Arc::new(Mutex::new(ReqNotData {
id: 1,
send: Box::new(send),
notif: Box::new(notif),
req: Box::new(req),
responses: HashMap::new(),
})))
}
pub fn new(send: impl SendFn<T>, notif: impl NotifFn<T>, req: impl ReqFn<T>) -> Self {
Self(Arc::new(Mutex::new(ReqNotData {
id: 1,
send: Box::new(send),
notif: Box::new(notif),
req: Box::new(req),
responses: HashMap::new(),
})))
}
/// Can be called from a polling thread or dispatched in any other way
pub fn receive(&self, message: &[u8]) {
let mut g = self.0.lock().unwrap();
let (id, payload) = get_id(message);
if id == 0 {
let mut notif = clone_box(&*g.notif);
mem::drop(g);
notif(<T::In as Channel>::Notif::decode(&mut &payload[..]), self.clone())
} else if 0 < id.bitand(1 << 63) {
let sender = g.responses.remove(&!id).expect("Received response for invalid message");
sender.send(message.to_vec()).unwrap();
} else {
let message = <T::In as Channel>::Req::decode(&mut &payload[..]);
let mut req = clone_box(&*g.req);
mem::drop(g);
let rn = self.clone();
thread::Builder::new()
.name(format!("request {id}"))
.spawn(move || req(RequestHandle::new(rn, id), message))
.unwrap();
}
}
/// Can be called from a polling thread or dispatched in any other way
pub fn receive(&self, message: &[u8]) {
let mut g = self.0.lock().unwrap();
let (id, payload) = get_id(message);
if id == 0 {
let mut notif = clone_box(&*g.notif);
mem::drop(g);
notif(<T::In as Channel>::Notif::decode(&mut &payload[..]), self.clone())
} else if 0 < id.bitand(1 << 63) {
let sender = g.responses.remove(&!id).expect("Received response for invalid message");
sender.send(message.to_vec()).unwrap();
} else {
let message = <T::In as Channel>::Req::decode(&mut &payload[..]);
let mut req = clone_box(&*g.req);
mem::drop(g);
let rn = self.clone();
thread::Builder::new()
.name(format!("request {id}"))
.spawn(move || req(RequestHandle::new(rn, id), message))
.unwrap();
}
}
pub fn notify<N: Coding + Into<<T::Out as Channel>::Notif>>(&self, notif: N) {
let mut send = clone_box(&*self.0.lock().unwrap().send);
let mut buf = vec![0; 8];
let msg: <T::Out as Channel>::Notif = notif.into();
msg.encode(&mut buf);
send(&buf, self.clone())
}
pub fn notify<N: Coding + Into<<T::Out as Channel>::Notif>>(&self, notif: N) {
let mut send = clone_box(&*self.0.lock().unwrap().send);
let mut buf = vec![0; 8];
let msg: <T::Out as Channel>::Notif = notif.into();
msg.encode(&mut buf);
send(&buf, self.clone())
}
}
pub trait DynRequester: Send + Sync {
type Transfer;
/// Encode and send a request, then receive the response buffer.
fn raw_request(&self, data: Self::Transfer) -> RawReply;
type Transfer;
/// Encode and send a request, then receive the response buffer.
fn raw_request(&self, data: Self::Transfer) -> RawReply;
}
pub struct MappedRequester<'a, T>(Box<dyn Fn(T) -> RawReply + Send + Sync + 'a>);
impl<'a, T> MappedRequester<'a, T> {
fn new<U: DynRequester + 'a>(req: U) -> Self
where T: Into<U::Transfer> {
MappedRequester(Box::new(move |t| req.raw_request(t.into())))
}
fn new<U: DynRequester + 'a>(req: U) -> Self
where T: Into<U::Transfer> {
MappedRequester(Box::new(move |t| req.raw_request(t.into())))
}
}
impl<T> DynRequester for MappedRequester<'_, T> {
type Transfer = T;
fn raw_request(&self, data: Self::Transfer) -> RawReply { self.0(data) }
type Transfer = T;
fn raw_request(&self, data: Self::Transfer) -> RawReply { self.0(data) }
}
impl<T: MsgSet> DynRequester for ReqNot<T> {
type Transfer = <T::Out as Channel>::Req;
fn raw_request(&self, req: Self::Transfer) -> RawReply {
let mut g = self.0.lock().unwrap();
let id = g.id;
g.id += 1;
let mut buf = id.to_be_bytes().to_vec();
req.encode(&mut buf);
let (send, recv) = sync_channel(1);
g.responses.insert(id, send);
let mut send = clone_box(&*g.send);
mem::drop(g);
send(&buf, self.clone());
RawReply(recv.recv().unwrap())
}
type Transfer = <T::Out as Channel>::Req;
fn raw_request(&self, req: Self::Transfer) -> RawReply {
let mut g = self.0.lock().unwrap();
let id = g.id;
g.id += 1;
let mut buf = id.to_be_bytes().to_vec();
req.encode(&mut buf);
let (send, recv) = sync_channel(1);
g.responses.insert(id, send);
let mut send = clone_box(&*g.send);
mem::drop(g);
send(&buf, self.clone());
RawReply(recv.recv().unwrap())
}
}
pub trait Requester: DynRequester {
#[must_use = "These types are subject to change with protocol versions. \
#[must_use = "These types are subject to change with protocol versions. \
If you don't want to use the return value, At a minimum, force the type."]
fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response;
fn map<'a, U: Into<Self::Transfer>>(self) -> MappedRequester<'a, U>
where Self: Sized + 'a {
MappedRequester::new(self)
}
fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response;
fn map<'a, U: Into<Self::Transfer>>(self) -> MappedRequester<'a, U>
where Self: Sized + 'a {
MappedRequester::new(self)
}
}
impl<This: DynRequester + ?Sized> Requester for This {
fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response {
R::Response::decode(&mut &self.raw_request(data.into())[..])
}
fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response {
R::Response::decode(&mut &self.raw_request(data.into())[..])
}
}
impl<T: MsgSet> Clone for ReqNot<T> {
fn clone(&self) -> Self { Self(self.0.clone()) }
fn clone(&self) -> Self { Self(self.0.clone()) }
}
#[cfg(test)]
mod test {
use std::sync::{Arc, Mutex};
use std::sync::{Arc, Mutex};
use orchid_api_derive::Coding;
use orchid_api_traits::{Channel, Request};
use orchid_api_derive::Coding;
use orchid_api_traits::{Channel, Request};
use super::{MsgSet, ReqNot};
use crate::clone;
use crate::reqnot::Requester as _;
use super::{MsgSet, ReqNot};
use crate::clone;
use crate::reqnot::Requester as _;
#[derive(Clone, Debug, Coding, PartialEq)]
pub struct TestReq(u8);
impl Request for TestReq {
type Response = u8;
}
#[derive(Clone, Debug, Coding, PartialEq)]
pub struct TestReq(u8);
impl Request for TestReq {
type Response = u8;
}
pub struct TestChan;
impl Channel for TestChan {
type Notif = u8;
type Req = TestReq;
}
pub struct TestChan;
impl Channel for TestChan {
type Notif = u8;
type Req = TestReq;
}
pub struct TestMsgSet;
impl MsgSet for TestMsgSet {
type In = TestChan;
type Out = TestChan;
}
pub struct TestMsgSet;
impl MsgSet for TestMsgSet {
type In = TestChan;
type Out = TestChan;
}
#[test]
fn notification() {
let received = Arc::new(Mutex::new(None));
let receiver = ReqNot::<TestMsgSet>::new(
|_, _| panic!("Should not send anything"),
clone!(received; move |notif, _| *received.lock().unwrap() = Some(notif)),
|_, _| panic!("Not receiving a request"),
);
let sender = ReqNot::<TestMsgSet>::new(
clone!(receiver; move |d, _| receiver.receive(d)),
|_, _| panic!("Should not receive notif"),
|_, _| panic!("Should not receive request"),
);
sender.notify(3);
assert_eq!(*received.lock().unwrap(), Some(3));
sender.notify(4);
assert_eq!(*received.lock().unwrap(), Some(4));
}
#[test]
fn notification() {
let received = Arc::new(Mutex::new(None));
let receiver = ReqNot::<TestMsgSet>::new(
|_, _| panic!("Should not send anything"),
clone!(received; move |notif, _| *received.lock().unwrap() = Some(notif)),
|_, _| panic!("Not receiving a request"),
);
let sender = ReqNot::<TestMsgSet>::new(
clone!(receiver; move |d, _| receiver.receive(d)),
|_, _| panic!("Should not receive notif"),
|_, _| panic!("Should not receive request"),
);
sender.notify(3);
assert_eq!(*received.lock().unwrap(), Some(3));
sender.notify(4);
assert_eq!(*received.lock().unwrap(), Some(4));
}
#[test]
fn request() {
let receiver = Arc::new(Mutex::<Option<ReqNot<TestMsgSet>>>::new(None));
let sender = Arc::new(ReqNot::<TestMsgSet>::new(
{
let receiver = receiver.clone();
move |d, _| receiver.lock().unwrap().as_ref().unwrap().receive(d)
},
|_, _| panic!("Should not receive notif"),
|_, _| panic!("Should not receive request"),
));
*receiver.lock().unwrap() = Some(ReqNot::new(
{
let sender = sender.clone();
move |d, _| sender.receive(d)
},
|_, _| panic!("Not receiving notifs"),
|hand, req| {
assert_eq!(req, TestReq(5));
hand.respond(&6u8)
},
));
let response = sender.request(TestReq(5));
assert_eq!(response, 6);
}
#[test]
fn request() {
let receiver = Arc::new(Mutex::<Option<ReqNot<TestMsgSet>>>::new(None));
let sender = Arc::new(ReqNot::<TestMsgSet>::new(
{
let receiver = receiver.clone();
move |d, _| receiver.lock().unwrap().as_ref().unwrap().receive(d)
},
|_, _| panic!("Should not receive notif"),
|_, _| panic!("Should not receive request"),
));
*receiver.lock().unwrap() = Some(ReqNot::new(
{
let sender = sender.clone();
move |d, _| sender.receive(d)
},
|_, _| panic!("Not receiving notifs"),
|hand, req| {
assert_eq!(req, TestReq(5));
hand.respond(&6u8)
},
));
let response = sender.request(TestReq(5));
assert_eq!(response, 6);
}
}
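// A minimal sketch of the framing that `receive` above decodes: the first 8
// bytes carry a big-endian id. 0 marks a notification, a set top bit (replies
// negate the request id) marks a response, and anything else is a request.
fn _classify_frame(message: &[u8]) -> &'static str {
	let id = u64::from_be_bytes(message[..8].try_into().unwrap());
	if id == 0 {
		"notification"
	} else if id & (1 << 63) != 0 {
		"response"
	} else {
		"request"
	}
}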

View File

@@ -8,20 +8,20 @@ use trait_set::trait_set;
use super::boxed_iter::BoxedIter;
trait_set! {
trait Payload<'a, T> = Fn() -> BoxedIter<'a, T> + 'a;
trait Payload<'a, T> = Fn() -> BoxedIter<'a, T> + 'a;
}
/// Dynamic iterator building callback. Given how many trait objects this
/// involves, it may actually be slower than C#.
pub struct Sequence<'a, T: 'a>(Rc<dyn Payload<'a, T>>);
impl<'a, T: 'a> Sequence<'a, T> {
/// Construct from a concrete function returning a concrete iterator
pub fn new<I: IntoIterator<Item = T> + 'a>(f: impl Fn() -> I + 'a) -> Self {
Self(Rc::new(move || Box::new(f().into_iter())))
}
/// Get an iterator from the function
pub fn iter(&self) -> BoxedIter<'_, T> { (self.0)() }
/// Construct from a concrete function returning a concrete iterator
pub fn new<I: IntoIterator<Item = T> + 'a>(f: impl Fn() -> I + 'a) -> Self {
Self(Rc::new(move || Box::new(f().into_iter())))
}
/// Get an iterator from the function
pub fn iter(&self) -> BoxedIter<'_, T> { (self.0)() }
}
impl<'a, T: 'a> Clone for Sequence<'a, T> {
fn clone(&self) -> Self { Self(self.0.clone()) }
fn clone(&self) -> Self { Self(self.0.clone()) }
}
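// A minimal sketch of the sequence above; the range is illustrative.
fn _sequence_sketch() {
	// The callback runs again on every `iter` call, so the sequence can be
	// walked any number of times even though each iterator is consumed.
	let seq = Sequence::new(|| 0..3);
	assert_eq!(seq.iter().collect::<Vec<_>>(), vec![0, 1, 2]);
	assert_eq!(seq.clone().iter().count(), 3);
}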

View File

@@ -10,88 +10,88 @@ use crate::boxed_iter::BoxedIter;
/// are technically usable for this purpose, they're very easy to confuse
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Side {
/// Left, low, or high-to-low in the case of sequences
Left,
/// Right, high, or low-to-high in the case of sequences
Right,
/// Left, low, or high-to-low in the case of sequences
Left,
/// Right, high, or low-to-high in the case of sequences
Right,
}
impl fmt::Display for Side {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Left => write!(f, "Left"),
Self::Right => write!(f, "Right"),
}
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Left => write!(f, "Left"),
Self::Right => write!(f, "Right"),
}
}
}
impl Side {
/// Get the side that is not the current one
pub fn opposite(&self) -> Self {
match self {
Self::Left => Self::Right,
Self::Right => Self::Left,
}
}
/// Shorthand for opposite
pub fn inv(&self) -> Self { self.opposite() }
/// take N elements from this end of a slice
pub fn slice<'a, T>(&self, size: usize, slice: &'a [T]) -> &'a [T] {
match self {
Side::Left => &slice[..size],
Side::Right => &slice[slice.len() - size..],
}
}
/// ignore N elements from this end of a slice
pub fn crop<'a, T>(&self, margin: usize, slice: &'a [T]) -> &'a [T] {
self.opposite().slice(slice.len() - margin, slice)
}
/// ignore N elements from this end and M elements from the other end
/// of a slice
pub fn crop_both<'a, T>(&self, margin: usize, opposite: usize, slice: &'a [T]) -> &'a [T] {
self.crop(margin, self.opposite().crop(opposite, slice))
}
/// Pick this side from a pair of things
pub fn pick<T>(&self, pair: (T, T)) -> T {
match self {
Side::Left => pair.0,
Side::Right => pair.1,
}
}
/// Make a pair with the first element on this side
pub fn pair<T>(&self, this: T, opposite: T) -> (T, T) {
match self {
Side::Left => (this, opposite),
Side::Right => (opposite, this),
}
}
/// Walk a double ended iterator (assumed to be left-to-right) in this
/// direction
pub fn walk<'a, I: DoubleEndedIterator + 'a>(&self, iter: I) -> BoxedIter<'a, I::Item> {
match self {
Side::Right => Box::new(iter) as BoxedIter<I::Item>,
Side::Left => Box::new(iter.rev()),
}
}
/// Get the side that is not the current one
pub fn opposite(&self) -> Self {
match self {
Self::Left => Self::Right,
Self::Right => Self::Left,
}
}
/// Shorthand for opposite
pub fn inv(&self) -> Self { self.opposite() }
/// take N elements from this end of a slice
pub fn slice<'a, T>(&self, size: usize, slice: &'a [T]) -> &'a [T] {
match self {
Side::Left => &slice[..size],
Side::Right => &slice[slice.len() - size..],
}
}
/// ignore N elements from this end of a slice
pub fn crop<'a, T>(&self, margin: usize, slice: &'a [T]) -> &'a [T] {
self.opposite().slice(slice.len() - margin, slice)
}
/// ignore N elements from this end and M elements from the other end
/// of a slice
pub fn crop_both<'a, T>(&self, margin: usize, opposite: usize, slice: &'a [T]) -> &'a [T] {
self.crop(margin, self.opposite().crop(opposite, slice))
}
/// Pick this side from a pair of things
pub fn pick<T>(&self, pair: (T, T)) -> T {
match self {
Side::Left => pair.0,
Side::Right => pair.1,
}
}
/// Make a pair with the first element on this side
pub fn pair<T>(&self, this: T, opposite: T) -> (T, T) {
match self {
Side::Left => (this, opposite),
Side::Right => (opposite, this),
}
}
/// Walk a double ended iterator (assumed to be left-to-right) in this
/// direction
pub fn walk<'a, I: DoubleEndedIterator + 'a>(&self, iter: I) -> BoxedIter<'a, I::Item> {
match self {
Side::Right => Box::new(iter) as BoxedIter<I::Item>,
Side::Left => Box::new(iter.rev()),
}
}
}
impl Not for Side {
type Output = Side;
type Output = Side;
fn not(self) -> Self::Output { self.opposite() }
fn not(self) -> Self::Output { self.opposite() }
}
#[cfg(test)]
mod test {
use itertools::Itertools;
use itertools::Itertools;
use super::*;
use super::*;
/// I apparently have a tendency to mix these up so it's best if
/// the sides are explicitly stated
#[test]
fn test_walk() {
assert_eq!(Side::Right.walk(0..4).collect_vec(), vec![0, 1, 2, 3], "can walk a range");
assert_eq!(Side::Left.walk(0..4).collect_vec(), vec![3, 2, 1, 0], "can walk a range backwards")
}
/// I apparently have a tendency to mix these up so it's best if
/// the sides are explicitly stated
#[test]
fn test_walk() {
assert_eq!(Side::Right.walk(0..4).collect_vec(), vec![0, 1, 2, 3], "can walk a range");
assert_eq!(Side::Left.walk(0..4).collect_vec(), vec![3, 2, 1, 0], "can walk a range backwards")
}
}
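// A minimal sketch of the slice helpers above; the data is illustrative.
fn _side_sketch() {
	let data = [1, 2, 3, 4, 5];
	// Take two elements from the chosen end...
	assert_eq!(Side::Left.slice(2, &data), &[1, 2]);
	assert_eq!(Side::Right.slice(2, &data), &[4, 5]);
	// ...or drop one element from this end and one from the other.
	assert_eq!(Side::Left.crop_both(1, 1, &data), &[2, 3, 4]);
}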

View File

@@ -3,4 +3,4 @@ pub use api::Paren;
use crate::api;
pub const PARENS: &[(char, char, Paren)] =
&[('(', ')', Paren::Round), ('[', ']', Paren::Square), ('{', '}', Paren::Curly)];
&[('(', ')', Paren::Round), ('[', ']', Paren::Square), ('{', '}', Paren::Curly)];

View File

@@ -6,320 +6,319 @@ use std::marker::PhantomData;
use std::ops::Range;
use std::sync::Arc;
pub use api::PhKind;
use itertools::Itertools;
use never::Never;
use ordered_float::NotNan;
use trait_set::trait_set;
use crate::{api, match_mapping};
use crate::error::OrcErrv;
use crate::interner::Tok;
use crate::location::Pos;
use crate::name::PathSlice;
use crate::parse::Snippet;
use crate::tokens::PARENS;
pub use api::PhKind as PhKind;
use crate::{api, match_mapping};
trait_set! {
pub trait RecurCB<'a, A: AtomRepr, X: ExtraTok> = Fn(TokTree<'a, A, X>) -> TokTree<'a, A, X>;
pub trait ExtraTok = Display + Clone + fmt::Debug;
pub trait RecurCB<'a, A: AtomRepr, X: ExtraTok> = Fn(TokTree<'a, A, X>) -> TokTree<'a, A, X>;
pub trait ExtraTok = Display + Clone + fmt::Debug;
}
pub fn recur<'a, A: AtomRepr, X: ExtraTok>(
tt: TokTree<'a, A, X>,
f: &impl Fn(TokTree<'a, A, X>, &dyn RecurCB<'a, A, X>) -> TokTree<'a, A, X>,
tt: TokTree<'a, A, X>,
f: &impl Fn(TokTree<'a, A, X>, &dyn RecurCB<'a, A, X>) -> TokTree<'a, A, X>,
) -> TokTree<'a, A, X> {
f(tt, &|TokTree { range, tok }| {
let tok = match tok {
tok @ (Token::Atom(_) | Token::BR | Token::Bottom(_) | Token::Comment(_) | Token::NS) => tok,
tok @ (Token::Name(_) | Token::Slot(_) | Token::X(_) | Token::Ph(_) | Token::Macro(_)) => tok,
Token::LambdaHead(arg) =>
Token::LambdaHead(arg.into_iter().map(|tt| recur(tt, f)).collect_vec()),
Token::S(p, b) => Token::S(p, b.into_iter().map(|tt| recur(tt, f)).collect_vec()),
};
TokTree { range, tok }
})
f(tt, &|TokTree { range, tok }| {
let tok = match tok {
tok @ (Token::Atom(_) | Token::BR | Token::Bottom(_) | Token::Comment(_) | Token::NS) => tok,
tok @ (Token::Name(_) | Token::Slot(_) | Token::X(_) | Token::Ph(_) | Token::Macro(_)) => tok,
Token::LambdaHead(arg) =>
Token::LambdaHead(arg.into_iter().map(|tt| recur(tt, f)).collect_vec()),
Token::S(p, b) => Token::S(p, b.into_iter().map(|tt| recur(tt, f)).collect_vec()),
};
TokTree { range, tok }
})
}
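// An illustrative sketch, not part of this commit, of the call shape of `recur`:
// `f` receives each node plus a callback that rebuilds S(..) and LambdaHead(..)
// bodies recursively. The tree and the rewrite below are hypothetical.
#[cfg(test)]
mod recur_usage_sketch {
  use super::*;
  #[test]
  fn comments_to_breaks() {
    let tree: TokTree<'static, Never, Never> =
      Token::S(Paren::Round, vec![Token::Comment(Arc::new("note".to_string())).at(1..2)]).at(0..3);
    let out = recur(tree, &|tt, descend| {
      // rewrite leaves here; defer to `descend` so groups are walked recursively
      if matches!(tt.tok, Token::Comment(_)) { Token::BR.at(tt.range.clone()) } else { descend(tt) }
    });
    match out.tok {
      Token::S(Paren::Round, body) => assert!(matches!(body[0].tok, Token::BR)),
      _ => panic!("expected the round group to be preserved"),
    }
  }
}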
pub trait AtomRepr: fmt::Display + Clone + fmt::Debug {
type Ctx: ?Sized;
fn from_api(api: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self;
fn to_api(&self) -> orchid_api::Atom;
type Ctx: ?Sized;
fn from_api(api: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self;
fn to_api(&self) -> orchid_api::Atom;
}
impl AtomRepr for Never {
type Ctx = Never;
fn from_api(_: &api::Atom, _: Pos, _: &mut Self::Ctx) -> Self { panic!() }
fn to_api(&self) -> orchid_api::Atom { match *self {} }
type Ctx = Never;
fn from_api(_: &api::Atom, _: Pos, _: &mut Self::Ctx) -> Self { panic!() }
fn to_api(&self) -> orchid_api::Atom { match *self {} }
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct TokHandle<'a>(api::TreeTicket, PhantomData<&'a ()>);
impl TokHandle<'static> {
pub fn new(tt: api::TreeTicket) -> Self { TokHandle(tt, PhantomData) }
pub fn new(tt: api::TreeTicket) -> Self { TokHandle(tt, PhantomData) }
}
impl TokHandle<'_> {
pub fn ticket(self) -> api::TreeTicket { self.0 }
pub fn ticket(self) -> api::TreeTicket { self.0 }
}
impl Display for TokHandle<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Handle({})", self.0.0) }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Handle({})", self.0.0) }
}
#[derive(Clone, Debug)]
pub struct TokTree<'a, A: AtomRepr, X: ExtraTok> {
pub tok: Token<'a, A, X>,
pub range: Range<u32>,
pub tok: Token<'a, A, X>,
pub range: Range<u32>,
}
impl<'a, A: AtomRepr, X: ExtraTok> TokTree<'a, A, X> {
pub fn from_api(tt: &api::TokenTree, ctx: &mut A::Ctx) -> Self {
let tok = match_mapping!(&tt.token, api::Token => Token::<'a, A, X> {
BR, NS,
Atom(a => A::from_api(a, Pos::Range(tt.range.clone()), ctx)),
Bottom(e => OrcErrv::from_api(e)),
LambdaHead(arg => ttv_from_api(arg, ctx)),
Name(n => Tok::from_api(*n)),
S(*par, b => ttv_from_api(b, ctx)),
Comment(c.clone()),
Slot(id => TokHandle::new(*id)),
Ph(ph => Ph::from_api(ph)),
Macro(*prio)
});
Self { range: tt.range.clone(), tok }
}
pub fn from_api(tt: &api::TokenTree, ctx: &mut A::Ctx) -> Self {
let tok = match_mapping!(&tt.token, api::Token => Token::<'a, A, X> {
BR, NS,
Atom(a => A::from_api(a, Pos::Range(tt.range.clone()), ctx)),
Bottom(e => OrcErrv::from_api(e)),
LambdaHead(arg => ttv_from_api(arg, ctx)),
Name(n => Tok::from_api(*n)),
S(*par, b => ttv_from_api(b, ctx)),
Comment(c.clone()),
Slot(id => TokHandle::new(*id)),
Ph(ph => Ph::from_api(ph)),
Macro(*prio)
});
Self { range: tt.range.clone(), tok }
}
pub fn to_api(
&self,
do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree,
) -> api::TokenTree {
let token = match_mapping!(&self.tok, Token => api::Token {
Atom(a.to_api()),
BR,
NS,
Bottom(e.to_api()),
Comment(c.clone()),
LambdaHead(arg => ttv_to_api(arg, do_extra)),
Name(n.to_api()),
Slot(tt.ticket()),
S(*p, b => ttv_to_api(b, do_extra)),
Ph(ph.to_api()),
Macro(*prio),
} {
Token::X(x) => return do_extra(x, self.range.clone())
});
api::TokenTree { range: self.range.clone(), token }
}
pub fn to_api(
&self,
do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree,
) -> api::TokenTree {
let token = match_mapping!(&self.tok, Token => api::Token {
Atom(a.to_api()),
BR,
NS,
Bottom(e.to_api()),
Comment(c.clone()),
LambdaHead(arg => ttv_to_api(arg, do_extra)),
Name(n.to_api()),
Slot(tt.ticket()),
S(*p, b => ttv_to_api(b, do_extra)),
Ph(ph.to_api()),
Macro(*prio),
} {
Token::X(x) => return do_extra(x, self.range.clone())
});
api::TokenTree { range: self.range.clone(), token }
}
pub fn into_api(
self,
do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree,
) -> api::TokenTree {
let token = match self.tok {
Token::Atom(a) => api::Token::Atom(a.to_api()),
Token::BR => api::Token::BR,
Token::NS => api::Token::NS,
Token::Bottom(e) => api::Token::Bottom(e.to_api()),
Token::Comment(c) => api::Token::Comment(c.clone()),
Token::LambdaHead(arg) => api::Token::LambdaHead(ttv_into_api(arg, do_extra)),
Token::Name(n) => api::Token::Name(n.to_api()),
Token::Slot(tt) => api::Token::Slot(tt.ticket()),
Token::S(p, b) => api::Token::S(p, ttv_into_api(b, do_extra)),
Token::Ph(Ph { kind, name }) =>
api::Token::Ph(api::Placeholder { name: name.to_api(), kind }),
Token::X(x) => return do_extra(x, self.range.clone()),
Token::Macro(prio) => api::Token::Macro(prio),
};
api::TokenTree { range: self.range.clone(), token }
}
pub fn into_api(
self,
do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree,
) -> api::TokenTree {
let token = match self.tok {
Token::Atom(a) => api::Token::Atom(a.to_api()),
Token::BR => api::Token::BR,
Token::NS => api::Token::NS,
Token::Bottom(e) => api::Token::Bottom(e.to_api()),
Token::Comment(c) => api::Token::Comment(c.clone()),
Token::LambdaHead(arg) => api::Token::LambdaHead(ttv_into_api(arg, do_extra)),
Token::Name(n) => api::Token::Name(n.to_api()),
Token::Slot(tt) => api::Token::Slot(tt.ticket()),
Token::S(p, b) => api::Token::S(p, ttv_into_api(b, do_extra)),
Token::Ph(Ph { kind, name }) =>
api::Token::Ph(api::Placeholder { name: name.to_api(), kind }),
Token::X(x) => return do_extra(x, self.range.clone()),
Token::Macro(prio) => api::Token::Macro(prio),
};
api::TokenTree { range: self.range.clone(), token }
}
pub fn is_kw(&self, tk: Tok<String>) -> bool { self.tok.is_kw(tk) }
pub fn as_name(&self) -> Option<Tok<String>> {
if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None }
}
pub fn as_s(&self, par: Paren) -> Option<Snippet<'_, 'a, A, X>> {
self.tok.as_s(par).map(|slc| Snippet::new(self, slc))
}
pub fn lambda(arg: Vec<Self>, mut body: Vec<Self>) -> Self {
let arg_range = ttv_range(&arg);
let s_range = arg_range.start..body.last().expect("Lambda with empty body!").range.end;
body.insert(0, Token::LambdaHead(arg).at(arg_range));
Token::S(Paren::Round, body).at(s_range)
}
pub fn is_kw(&self, tk: Tok<String>) -> bool { self.tok.is_kw(tk) }
pub fn as_name(&self) -> Option<Tok<String>> {
if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None }
}
pub fn as_s(&self, par: Paren) -> Option<Snippet<'_, 'a, A, X>> {
self.tok.as_s(par).map(|slc| Snippet::new(self, slc))
}
pub fn lambda(arg: Vec<Self>, mut body: Vec<Self>) -> Self {
let arg_range = ttv_range(&arg);
let s_range = arg_range.start..body.last().expect("Lambda with empty body!").range.end;
body.insert(0, Token::LambdaHead(arg).at(arg_range));
Token::S(Paren::Round, body).at(s_range)
}
}
impl<A: AtomRepr, X: ExtraTok> Display for TokTree<'_, A, X> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.tok) }
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.tok) }
}
pub fn ttv_from_api<A: AtomRepr, X: ExtraTok>(
tokv: impl IntoIterator<Item: Borrow<api::TokenTree>>,
ctx: &mut A::Ctx,
tokv: impl IntoIterator<Item: Borrow<api::TokenTree>>,
ctx: &mut A::Ctx,
) -> Vec<TokTree<'static, A, X>> {
tokv.into_iter().map(|t| TokTree::<A, X>::from_api(t.borrow(), ctx)).collect()
tokv.into_iter().map(|t| TokTree::<A, X>::from_api(t.borrow(), ctx)).collect()
}
pub fn ttv_to_api<'a, A: AtomRepr, X: ExtraTok>(
tokv: impl IntoIterator<Item: Borrow<TokTree<'a, A, X>>>,
do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree,
tokv: impl IntoIterator<Item: Borrow<TokTree<'a, A, X>>>,
do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree,
) -> Vec<api::TokenTree> {
tokv.into_iter().map(|tok| Borrow::<TokTree<A, X>>::borrow(&tok).to_api(do_extra)).collect_vec()
tokv.into_iter().map(|tok| Borrow::<TokTree<A, X>>::borrow(&tok).to_api(do_extra)).collect_vec()
}
pub fn ttv_into_api<'a, A: AtomRepr, X: ExtraTok>(
tokv: impl IntoIterator<Item = TokTree<'a, A, X>>,
do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree,
tokv: impl IntoIterator<Item = TokTree<'a, A, X>>,
do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree,
) -> Vec<api::TokenTree> {
tokv.into_iter().map(|t| t.into_api(do_extra)).collect_vec()
tokv.into_iter().map(|t| t.into_api(do_extra)).collect_vec()
}
/// This takes a position and not a range because it assigns the range to
/// multiple leaf tokens, which is only valid if it's a zero-width range
pub fn vname_tv<'a: 'b, 'b, A: AtomRepr + 'a, X: ExtraTok + 'a>(
name: &'b PathSlice,
pos: u32,
name: &'b PathSlice,
pos: u32,
) -> impl Iterator<Item = TokTree<'a, A, X>> + 'b {
let (head, tail) = name.split_first().expect("Empty vname");
iter::once(Token::Name(head.clone()))
.chain(tail.iter().flat_map(|t| [Token::NS, Token::Name(t.clone())]))
.map(move |t| t.at(pos..pos))
let (head, tail) = name.split_first().expect("Empty vname");
iter::once(Token::Name(head.clone()))
.chain(tail.iter().flat_map(|t| [Token::NS, Token::Name(t.clone())]))
.map(move |t| t.at(pos..pos))
}
pub fn wrap_tokv<'a, A: AtomRepr, X: ExtraTok>(
items: impl IntoIterator<Item = TokTree<'a, A, X>>
items: impl IntoIterator<Item = TokTree<'a, A, X>>,
) -> TokTree<'a, A, X> {
let items_v = items.into_iter().collect_vec();
match items_v.len() {
0 => panic!("A tokv with no elements is illegal"),
1 => items_v.into_iter().next().unwrap(),
_ => {
let range = items_v.first().unwrap().range.start..items_v.last().unwrap().range.end;
Token::S(api::Paren::Round, items_v).at(range)
},
}
let items_v = items.into_iter().collect_vec();
match items_v.len() {
0 => panic!("A tokv with no elements is illegal"),
1 => items_v.into_iter().next().unwrap(),
_ => {
let range = items_v.first().unwrap().range.start..items_v.last().unwrap().range.end;
Token::S(api::Paren::Round, items_v).at(range)
},
}
}
pub use api::Paren;
#[derive(Clone, Debug)]
pub enum Token<'a, A: AtomRepr, X: ExtraTok> {
Comment(Arc<String>),
LambdaHead(Vec<TokTree<'a, A, X>>),
Name(Tok<String>),
NS,
BR,
S(Paren, Vec<TokTree<'a, A, X>>),
Atom(A),
Bottom(OrcErrv),
Slot(TokHandle<'a>),
X(X),
Ph(Ph),
Macro(Option<NotNan<f64>>),
Comment(Arc<String>),
LambdaHead(Vec<TokTree<'a, A, X>>),
Name(Tok<String>),
NS,
BR,
S(Paren, Vec<TokTree<'a, A, X>>),
Atom(A),
Bottom(OrcErrv),
Slot(TokHandle<'a>),
X(X),
Ph(Ph),
Macro(Option<NotNan<f64>>),
}
impl<'a, A: AtomRepr, X: ExtraTok> Token<'a, A, X> {
pub fn at(self, range: Range<u32>) -> TokTree<'a, A, X> { TokTree { range, tok: self } }
pub fn is_kw(&self, tk: Tok<String>) -> bool {
matches!(self, Token::Name(n) if *n == tk)
}
pub fn as_s(&self, par: Paren) -> Option<&[TokTree<'a, A, X>]> {
match self {
Self::S(p, b) if *p == par => Some(b),
_ => None,
}
}
pub fn at(self, range: Range<u32>) -> TokTree<'a, A, X> { TokTree { range, tok: self } }
pub fn is_kw(&self, tk: Tok<String>) -> bool { matches!(self, Token::Name(n) if *n == tk) }
pub fn as_s(&self, par: Paren) -> Option<&[TokTree<'a, A, X>]> {
match self {
Self::S(p, b) if *p == par => Some(b),
_ => None,
}
}
}
impl<A: AtomRepr, X: ExtraTok> Display for Token<'_, A, X> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
thread_local! {
static PAREN_LEVEL: RefCell<usize> = 0.into();
}
fn get_indent() -> usize { PAREN_LEVEL.with_borrow(|t| *t) }
fn with_indent<T>(f: impl FnOnce() -> T) -> T {
PAREN_LEVEL.with_borrow_mut(|t| *t += 1);
let r = f();
PAREN_LEVEL.with_borrow_mut(|t| *t -= 1);
r
}
match self {
Self::Atom(a) => f.write_str(&indent(&format!("{a} "), get_indent(), false)),
Self::BR => write!(f, "\n{}", " ".repeat(get_indent())),
Self::Bottom(err) if err.len() == 1 => write!(f, "Bottom({}) ", err.one().unwrap()),
Self::Bottom(err) => {
write!(f, "Botttom(\n{}) ", indent(&err.to_string(), get_indent() + 1, true))
},
Self::Comment(c) => write!(f, "--[{c}]-- "),
Self::LambdaHead(arg) => with_indent(|| write!(f, "\\ {} . ", ttv_fmt(arg))),
Self::NS => f.write_str(":: "),
Self::Name(n) => write!(f, "{n} "),
Self::Slot(th) => write!(f, "{th} "),
Self::Ph(Ph { kind, name }) => match &kind {
PhKind::Scalar => write!(f, "${name}"),
PhKind::Vector { at_least_one, priority } => {
if *at_least_one { write!(f, ".")? }
write!(f, "..${name}")?;
if 0 < *priority { write!(f, "{priority}") } else { Ok(()) }
}
}
Self::S(p, b) => {
let (lp, rp, _) = PARENS.iter().find(|(_, _, par)| par == p).unwrap();
write!(f, "{lp} ")?;
with_indent(|| f.write_str(&ttv_fmt(b)))?;
write!(f, "{rp} ")
},
Self::X(x) => write!(f, "{x} "),
Self::Macro(None) => write!(f, "macro "),
Self::Macro(Some(prio)) => write!(f, "macro({prio})"),
}
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
thread_local! {
static PAREN_LEVEL: RefCell<usize> = 0.into();
}
fn get_indent() -> usize { PAREN_LEVEL.with_borrow(|t| *t) }
fn with_indent<T>(f: impl FnOnce() -> T) -> T {
PAREN_LEVEL.with_borrow_mut(|t| *t += 1);
let r = f();
PAREN_LEVEL.with_borrow_mut(|t| *t -= 1);
r
}
match self {
Self::Atom(a) => f.write_str(&indent(&format!("{a} "), get_indent(), false)),
Self::BR => write!(f, "\n{}", " ".repeat(get_indent())),
Self::Bottom(err) if err.len() == 1 => write!(f, "Bottom({}) ", err.one().unwrap()),
Self::Bottom(err) => {
write!(f, "Botttom(\n{}) ", indent(&err.to_string(), get_indent() + 1, true))
},
Self::Comment(c) => write!(f, "--[{c}]-- "),
Self::LambdaHead(arg) => with_indent(|| write!(f, "\\ {} . ", ttv_fmt(arg))),
Self::NS => f.write_str(":: "),
Self::Name(n) => write!(f, "{n} "),
Self::Slot(th) => write!(f, "{th} "),
Self::Ph(Ph { kind, name }) => match &kind {
PhKind::Scalar => write!(f, "${name}"),
PhKind::Vector { at_least_one, priority } => {
if *at_least_one {
write!(f, ".")?
}
write!(f, "..${name}")?;
if 0 < *priority { write!(f, "{priority}") } else { Ok(()) }
},
},
Self::S(p, b) => {
let (lp, rp, _) = PARENS.iter().find(|(_, _, par)| par == p).unwrap();
write!(f, "{lp} ")?;
with_indent(|| f.write_str(&ttv_fmt(b)))?;
write!(f, "{rp} ")
},
Self::X(x) => write!(f, "{x} "),
Self::Macro(None) => write!(f, "macro "),
Self::Macro(Some(prio)) => write!(f, "macro({prio})"),
}
}
}
pub fn ttv_range(ttv: &[TokTree<'_, impl AtomRepr, impl ExtraTok>]) -> Range<u32> {
assert!(!ttv.is_empty(), "Empty slice has no range");
ttv.first().unwrap().range.start..ttv.last().unwrap().range.end
assert!(!ttv.is_empty(), "Empty slice has no range");
ttv.first().unwrap().range.start..ttv.last().unwrap().range.end
}
pub fn ttv_fmt<'a: 'b, 'b>(
ttv: impl IntoIterator<Item = &'b TokTree<'a, impl AtomRepr + 'b, impl ExtraTok + 'b>>,
ttv: impl IntoIterator<Item = &'b TokTree<'a, impl AtomRepr + 'b, impl ExtraTok + 'b>>,
) -> String {
ttv.into_iter().join("")
ttv.into_iter().join("")
}
pub fn indent(s: &str, lvl: usize, first: bool) -> String {
if first {
s.replace("\n", &("\n".to_string() + &" ".repeat(lvl)))
} else if let Some((fst, rest)) = s.split_once('\n') {
fst.to_string() + "\n" + &indent(rest, lvl, true)
} else {
s.to_string()
}
if first {
s.replace("\n", &("\n".to_string() + &" ".repeat(lvl)))
} else if let Some((fst, rest)) = s.split_once('\n') {
fst.to_string() + "\n" + &indent(rest, lvl, true)
} else {
s.to_string()
}
}
#[derive(Clone, Debug)]
pub struct Ph {
pub name: Tok<String>,
pub kind: PhKind,
pub name: Tok<String>,
pub kind: PhKind,
}
impl Ph {
pub fn from_api(api: &api::Placeholder) -> Self {
Self { name: Tok::from_api(api.name), kind: api.kind }
}
pub fn to_api(&self) -> api::Placeholder {
api::Placeholder { name: self.name.to_api(), kind: self.kind }
}
pub fn from_api(api: &api::Placeholder) -> Self {
Self { name: Tok::from_api(api.name), kind: api.kind }
}
pub fn to_api(&self) -> api::Placeholder {
api::Placeholder { name: self.name.to_api(), kind: self.kind }
}
}
#[cfg(test)]
mod test {
use super::*;
use super::*;
#[test]
fn test_covariance() {
fn _f<'a>(x: Token<'static, Never, Never>) -> Token<'a, Never, Never> { x }
}
#[test]
fn test_covariance() {
fn _f<'a>(x: Token<'static, Never, Never>) -> Token<'a, Never, Never> { x }
}
#[test]
fn fail_covariance() {
// this fails to compile
// fn _f<'a, 'b>(x: &'a mut &'static ()) -> &'a mut &'b () { x }
// this passes because it's covariant
fn _f<'a, 'b>(x: &'a fn() -> &'static ()) -> &'a fn() -> &'b () { x }
}
#[test]
fn fail_covariance() {
// this fails to compile
// fn _f<'a, 'b>(x: &'a mut &'static ()) -> &'a mut &'b () { x }
// this passes because it's covariant
fn _f<'a, 'b>(x: &'a fn() -> &'static ()) -> &'a fn() -> &'b () { x }
}
}

View File

@@ -11,15 +11,15 @@ derive_destructure = "1.0.0"
dyn-clone = "1.0.17"
hashbrown = "0.15.2"
itertools = "0.14.0"
konst = "0.3.9"
konst = "0.3.16"
lazy_static = "1.5.0"
never = "0.1.0"
once_cell = "1.19.0"
once_cell = "1.20.2"
orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "4.2.0"
ordered-float = "4.6.0"
paste = "1.0.15"
substack = "1.1.0"
substack = "1.1.1"
trait-set = "0.3.0"

View File

@@ -1,13 +1,13 @@
use std::any::{type_name, Any, TypeId};
use std::any::{Any, TypeId, type_name};
use std::fmt;
use std::io::{Read, Write};
use std::marker::PhantomData;
use std::ops::Deref;
use std::sync::{Arc, OnceLock};
use dyn_clone::{clone_box, DynClone};
use orchid_api_traits::{enc_vec, Coding, Decode, Encode, Request};
use orchid_base::error::{mk_err, OrcErr, OrcRes};
use dyn_clone::{DynClone, clone_box};
use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec};
use orchid_base::error::{OrcErr, OrcRes, mk_err};
use orchid_base::intern;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
@@ -18,241 +18,247 @@ use trait_set::trait_set;
use crate::api;
// use crate::error::{ProjectError, ProjectResult};
use crate::expr::{Expr, ExprData, ExprHandle, ExprKind};
use crate::system::{atom_info_for, downcast_atom, DynSystemCard, SysCtx};
use crate::system::{DynSystemCard, SysCtx, atom_info_for, downcast_atom};
pub trait AtomCard: 'static + Sized {
type Data: Clone + Coding + Sized;
type Data: Clone + Coding + Sized;
}
pub trait AtomicVariant {}
pub trait Atomic: 'static + Sized {
type Variant: AtomicVariant;
type Data: Clone + Coding + Sized;
fn reg_reqs() -> MethodSet<Self>;
type Variant: AtomicVariant;
type Data: Clone + Coding + Sized;
fn reg_reqs() -> MethodSet<Self>;
}
impl<A: Atomic> AtomCard for A {
type Data = <Self as Atomic>::Data;
type Data = <Self as Atomic>::Data;
}
pub trait AtomicFeatures: Atomic {
fn factory(self) -> AtomFactory;
type Info: AtomDynfo;
fn info() -> Self::Info;
fn dynfo() -> Box<dyn AtomDynfo>;
fn factory(self) -> AtomFactory;
type Info: AtomDynfo;
fn info() -> Self::Info;
fn dynfo() -> Box<dyn AtomDynfo>;
}
pub trait ToAtom {
fn to_atom_factory(self) -> AtomFactory;
fn to_atom_factory(self) -> AtomFactory;
}
impl<A: AtomicFeatures> ToAtom for A {
fn to_atom_factory(self) -> AtomFactory { self.factory() }
fn to_atom_factory(self) -> AtomFactory { self.factory() }
}
impl ToAtom for AtomFactory {
fn to_atom_factory(self) -> AtomFactory { self }
fn to_atom_factory(self) -> AtomFactory { self }
}
pub trait AtomicFeaturesImpl<Variant: AtomicVariant> {
fn _factory(self) -> AtomFactory;
type _Info: AtomDynfo;
fn _info() -> Self::_Info;
fn _factory(self) -> AtomFactory;
type _Info: AtomDynfo;
fn _info() -> Self::_Info;
}
impl<A: Atomic + AtomicFeaturesImpl<A::Variant>> AtomicFeatures for A {
fn factory(self) -> AtomFactory { self._factory() }
type Info = <Self as AtomicFeaturesImpl<A::Variant>>::_Info;
fn info() -> Self::Info { Self::_info() }
fn dynfo() -> Box<dyn AtomDynfo> { Box::new(Self::info()) }
fn factory(self) -> AtomFactory { self._factory() }
type Info = <Self as AtomicFeaturesImpl<A::Variant>>::_Info;
fn info() -> Self::Info { Self::_info() }
fn dynfo() -> Box<dyn AtomDynfo> { Box::new(Self::info()) }
}
pub fn get_info<A: AtomCard>(
sys: &(impl DynSystemCard + ?Sized),
sys: &(impl DynSystemCard + ?Sized),
) -> (api::AtomId, Box<dyn AtomDynfo>) {
atom_info_for(sys, TypeId::of::<A>()).unwrap_or_else(|| {
panic!("Atom {} not associated with system {}", type_name::<A>(), sys.name())
})
atom_info_for(sys, TypeId::of::<A>()).unwrap_or_else(|| {
panic!("Atom {} not associated with system {}", type_name::<A>(), sys.name())
})
}
#[derive(Clone)]
pub struct ForeignAtom<'a> {
pub expr: Option<Arc<ExprHandle>>,
pub _life: PhantomData<&'a ()>,
pub ctx: SysCtx,
pub atom: api::Atom,
pub pos: Pos,
pub expr: Option<Arc<ExprHandle>>,
pub _life: PhantomData<&'a ()>,
pub ctx: SysCtx,
pub atom: api::Atom,
pub pos: Pos,
}
impl ForeignAtom<'_> {
pub fn oex_opt(self) -> Option<Expr> {
let (handle, pos) = (self.expr.as_ref()?.clone(), self.pos.clone());
let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { _life: PhantomData, ..self }) };
Some(Expr { handle: Some(handle), val: OnceLock::from(data) })
}
pub fn oex_opt(self) -> Option<Expr> {
let (handle, pos) = (self.expr.as_ref()?.clone(), self.pos.clone());
let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { _life: PhantomData, ..self }) };
Some(Expr { handle: Some(handle), val: OnceLock::from(data) })
}
}
impl ForeignAtom<'static> {
pub fn oex(self) -> Expr { self.oex_opt().unwrap() }
pub(crate) fn new(handle: Arc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self {
ForeignAtom { _life: PhantomData, atom, ctx: handle.ctx.clone(), expr: Some(handle), pos }
}
pub fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
let rep = self.ctx.reqnot.request(api::Fwd(
self.atom.clone(),
Sym::parse(M::NAME).unwrap().tok().to_api(),
enc_vec(&m)
))?;
Some(M::Response::decode(&mut &rep[..]))
}
pub fn oex(self) -> Expr { self.oex_opt().unwrap() }
pub(crate) fn new(handle: Arc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self {
ForeignAtom { _life: PhantomData, atom, ctx: handle.ctx.clone(), expr: Some(handle), pos }
}
pub fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
let rep = self.ctx.reqnot.request(api::Fwd(
self.atom.clone(),
Sym::parse(M::NAME).unwrap().tok().to_api(),
enc_vec(&m),
))?;
Some(M::Response::decode(&mut &rep[..]))
}
}
impl fmt::Display for ForeignAtom<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}::{:?}", if self.expr.is_some() { "Clause" } else { "Tok" }, self.atom)
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}::{:?}", if self.expr.is_some() { "Clause" } else { "Tok" }, self.atom)
}
}
impl fmt::Debug for ForeignAtom<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ForeignAtom({self})") }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ForeignAtom({self})") }
}
impl AtomRepr for ForeignAtom<'_> {
type Ctx = SysCtx;
fn from_api(atom: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self {
Self { atom: atom.clone(), _life: PhantomData, ctx: ctx.clone(), expr: None, pos }
}
fn to_api(&self) -> orchid_api::Atom { self.atom.clone() }
type Ctx = SysCtx;
fn from_api(atom: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self {
Self { atom: atom.clone(), _life: PhantomData, ctx: ctx.clone(), expr: None, pos }
}
fn to_api(&self) -> orchid_api::Atom { self.atom.clone() }
}
pub struct NotTypAtom(pub Pos, pub Expr, pub Box<dyn AtomDynfo>);
impl NotTypAtom {
pub fn mk_err(&self) -> OrcErr {
mk_err(
intern!(str: "Not the expected type"),
format!("This expression is not a {}", self.2.name()),
[self.0.clone().into()],
)
}
pub fn mk_err(&self) -> OrcErr {
mk_err(
intern!(str: "Not the expected type"),
format!("This expression is not a {}", self.2.name()),
[self.0.clone().into()],
)
}
}
pub trait AtomMethod: Request {
const NAME: &str;
const NAME: &str;
}
pub trait Supports<M: AtomMethod>: AtomCard {
fn handle(&self, ctx: SysCtx, req: M) -> <M as Request>::Response;
fn handle(&self, ctx: SysCtx, req: M) -> <M as Request>::Response;
}
trait_set! {
trait AtomReqCb<A> = Fn(&A, SysCtx, &mut dyn Read, &mut dyn Write) + Send + Sync
trait AtomReqCb<A> = Fn(&A, SysCtx, &mut dyn Read, &mut dyn Write) + Send + Sync
}
pub struct AtomReqHandler<A: AtomCard> {
key: Sym,
cb: Box<dyn AtomReqCb<A>>,
key: Sym,
cb: Box<dyn AtomReqCb<A>>,
}
pub struct MethodSet<A: AtomCard> {
handlers: Vec<AtomReqHandler<A>>,
handlers: Vec<AtomReqHandler<A>>,
}
impl<A: AtomCard> MethodSet<A> {
pub fn new() -> Self { Self{ handlers: vec![] } }
pub fn new() -> Self { Self { handlers: vec![] } }
pub fn handle<M: AtomMethod>(mut self) -> Self where A: Supports<M> {
self.handlers.push(AtomReqHandler {
key: Sym::parse(M::NAME).expect("AtomMethod::NAME cannot be empty"),
cb: Box::new(move |
a: &A,
ctx: SysCtx,
req: &mut dyn Read,
rep: &mut dyn Write
| {
Supports::<M>::handle(a, ctx, M::decode(req)).encode(rep);
})
});
self
}
pub fn handle<M: AtomMethod>(mut self) -> Self
where A: Supports<M> {
self.handlers.push(AtomReqHandler {
key: Sym::parse(M::NAME).expect("AtomMethod::NAME cannot be empty"),
cb: Box::new(move |a: &A, ctx: SysCtx, req: &mut dyn Read, rep: &mut dyn Write| {
Supports::<M>::handle(a, ctx, M::decode(req)).encode(rep);
}),
});
self
}
pub(crate) fn dispatch(
&self, atom: &A, ctx: SysCtx, key: Sym, req: &mut dyn Read, rep: &mut dyn Write
) -> bool {
match self.handlers.iter().find(|h| h.key == key) {
None => false,
Some(handler) => {
(handler.cb)(atom, ctx, req, rep);
true
},
}
}
pub(crate) fn dispatch(
&self,
atom: &A,
ctx: SysCtx,
key: Sym,
req: &mut dyn Read,
rep: &mut dyn Write,
) -> bool {
match self.handlers.iter().find(|h| h.key == key) {
None => false,
Some(handler) => {
(handler.cb)(atom, ctx, req, rep);
true
},
}
}
}
impl<A: AtomCard> Default for MethodSet<A> {
fn default() -> Self {
Self::new()
}
fn default() -> Self { Self::new() }
}
#[derive(Clone)]
pub struct TypAtom<'a, A: AtomicFeatures> {
pub data: ForeignAtom<'a>,
pub value: A::Data,
pub data: ForeignAtom<'a>,
pub value: A::Data,
}
impl<A: AtomicFeatures> TypAtom<'static, A> {
pub fn downcast(expr: Arc<ExprHandle>) -> Result<Self, NotTypAtom> {
match Expr::new(expr).foreign_atom() {
Err(oe) => Err(NotTypAtom(oe.get_data().pos.clone(), oe, Box::new(A::info()))),
Ok(atm) => match downcast_atom::<A>(atm) {
Err(fa) => Err(NotTypAtom(fa.pos.clone(), fa.oex(), Box::new(A::info()))),
Ok(tatom) => Ok(tatom),
},
}
}
pub fn downcast(expr: Arc<ExprHandle>) -> Result<Self, NotTypAtom> {
match Expr::new(expr).foreign_atom() {
Err(oe) => Err(NotTypAtom(oe.get_data().pos.clone(), oe, Box::new(A::info()))),
Ok(atm) => match downcast_atom::<A>(atm) {
Err(fa) => Err(NotTypAtom(fa.pos.clone(), fa.oex(), Box::new(A::info()))),
Ok(tatom) => Ok(tatom),
},
}
}
}
impl<A: AtomicFeatures> TypAtom<'_, A> {
pub fn request<M: AtomMethod>(&self, req: M) -> M::Response where A: Supports<M> {
M::Response::decode(
&mut &self.data.ctx.reqnot.request(api::Fwd(
self.data.atom.clone(),
Sym::parse(M::NAME).unwrap().tok().to_api(),
enc_vec(&req)
)).unwrap()[..]
)
}
pub fn request<M: AtomMethod>(&self, req: M) -> M::Response
where A: Supports<M> {
M::Response::decode(
&mut &self
.data
.ctx
.reqnot
.request(api::Fwd(
self.data.atom.clone(),
Sym::parse(M::NAME).unwrap().tok().to_api(),
enc_vec(&req),
))
.unwrap()[..],
)
}
}
impl<A: AtomicFeatures> Deref for TypAtom<'_, A> {
type Target = A::Data;
fn deref(&self) -> &Self::Target { &self.value }
type Target = A::Data;
fn deref(&self) -> &Self::Target { &self.value }
}
pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>, pub SysCtx);
pub trait AtomDynfo: Send + Sync + 'static {
fn tid(&self) -> TypeId;
fn name(&self) -> &'static str;
fn decode(&self, ctx: AtomCtx<'_>) -> Box<dyn Any>;
fn call(&self, ctx: AtomCtx<'_>, arg: api::ExprTicket) -> Expr;
fn call_ref(&self, ctx: AtomCtx<'_>, arg: api::ExprTicket) -> Expr;
fn print(&self, ctx: AtomCtx<'_>) -> String;
fn handle_req(&self, ctx: AtomCtx<'_>, key: Sym, req: &mut dyn Read, rep: &mut dyn Write) -> bool;
fn command(&self, ctx: AtomCtx<'_>) -> OrcRes<Option<Expr>>;
fn serialize(&self, ctx: AtomCtx<'_>, write: &mut dyn Write) -> Option<Vec<api::ExprTicket>>;
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> api::Atom;
fn drop(&self, ctx: AtomCtx<'_>);
fn tid(&self) -> TypeId;
fn name(&self) -> &'static str;
fn decode(&self, ctx: AtomCtx<'_>) -> Box<dyn Any>;
fn call(&self, ctx: AtomCtx<'_>, arg: api::ExprTicket) -> Expr;
fn call_ref(&self, ctx: AtomCtx<'_>, arg: api::ExprTicket) -> Expr;
fn print(&self, ctx: AtomCtx<'_>) -> String;
fn handle_req(&self, ctx: AtomCtx<'_>, key: Sym, req: &mut dyn Read, rep: &mut dyn Write)
-> bool;
fn command(&self, ctx: AtomCtx<'_>) -> OrcRes<Option<Expr>>;
fn serialize(&self, ctx: AtomCtx<'_>, write: &mut dyn Write) -> Option<Vec<api::ExprTicket>>;
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> api::Atom;
fn drop(&self, ctx: AtomCtx<'_>);
}
trait_set! {
pub trait AtomFactoryFn = FnOnce(SysCtx) -> api::Atom + DynClone + Send + Sync;
pub trait AtomFactoryFn = FnOnce(SysCtx) -> api::Atom + DynClone + Send + Sync;
}
pub struct AtomFactory(Box<dyn AtomFactoryFn>);
impl AtomFactory {
pub fn new(f: impl FnOnce(SysCtx) -> api::Atom + Clone + Send + Sync + 'static) -> Self {
Self(Box::new(f))
}
pub fn build(self, ctx: SysCtx) -> api::Atom { (self.0)(ctx) }
pub fn new(f: impl FnOnce(SysCtx) -> api::Atom + Clone + Send + Sync + 'static) -> Self {
Self(Box::new(f))
}
pub fn build(self, ctx: SysCtx) -> api::Atom { (self.0)(ctx) }
}
impl Clone for AtomFactory {
fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) }
fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) }
}
impl fmt::Debug for AtomFactory {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "AtomFactory") }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "AtomFactory") }
}
impl fmt::Display for AtomFactory {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "AtomFactory") }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "AtomFactory") }
}
pub fn err_not_callable() -> OrcErr {
mk_err(intern!(str: "This atom is not callable"), "Attempted to apply value as function", [])
mk_err(intern!(str: "This atom is not callable"), "Attempted to apply value as function", [])
}
pub fn err_not_command() -> OrcErr {
mk_err(intern!(str: "This atom is not a command"), "Settled on an inactionable value", [])
mk_err(intern!(str: "This atom is not a command"), "Settled on an inactionable value", [])
}

View File

@@ -12,8 +12,8 @@ use orchid_base::name::Sym;
use crate::api;
use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
err_not_callable, err_not_command, get_info,
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
err_not_callable, err_not_command, get_info,
};
use crate::expr::{Expr, ExprHandle, bot};
use crate::system::SysCtx;
@@ -21,197 +21,197 @@ use crate::system::SysCtx;
pub struct OwnedVariant;
impl AtomicVariant for OwnedVariant {}
impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A {
fn _factory(self) -> AtomFactory {
AtomFactory::new(move |ctx| {
let rec = OBJ_STORE.add(Box::new(self));
let (id, _) = get_info::<A>(ctx.cted.inst().card());
let mut data = enc_vec(&id);
rec.encode(&mut data);
api::Atom { drop: Some(api::AtomId(rec.id())), data, owner: ctx.id }
})
}
fn _info() -> Self::_Info { OwnedAtomDynfo(A::reg_reqs()) }
type _Info = OwnedAtomDynfo<A>;
fn _factory(self) -> AtomFactory {
AtomFactory::new(move |ctx| {
let rec = OBJ_STORE.add(Box::new(self));
let (id, _) = get_info::<A>(ctx.cted.inst().card());
let mut data = enc_vec(&id);
rec.encode(&mut data);
api::Atom { drop: Some(api::AtomId(rec.id())), data, owner: ctx.id }
})
}
fn _info() -> Self::_Info { OwnedAtomDynfo(A::reg_reqs()) }
type _Info = OwnedAtomDynfo<A>;
}
fn with_atom<U>(id: api::AtomId, f: impl FnOnce(IdRecord<'_, Box<dyn DynOwnedAtom>>) -> U) -> U {
f(OBJ_STORE.get(id.0).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0)))
f(OBJ_STORE.get(id.0).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0)))
}
pub struct OwnedAtomDynfo<T: OwnedAtom>(MethodSet<T>);
impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
fn print(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> String {
with_atom(id.unwrap(), |a| a.dyn_print(ctx))
}
fn tid(&self) -> TypeId { TypeId::of::<T>() }
fn name(&self) -> &'static str { type_name::<T>() }
fn decode(&self, AtomCtx(data, ..): AtomCtx) -> Box<dyn Any> {
Box::new(<T as AtomCard>::Data::decode(&mut &data[..]))
}
fn call(&self, AtomCtx(_, id, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
with_atom(id.unwrap(), |a| a.remove().dyn_call(ctx, arg))
}
fn call_ref(&self, AtomCtx(_, id, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
with_atom(id.unwrap(), |a| a.dyn_call_ref(ctx, arg))
}
fn handle_req(
&self,
AtomCtx(_, id, ctx): AtomCtx,
key: Sym,
req: &mut dyn Read,
rep: &mut dyn Write,
) -> bool {
with_atom(id.unwrap(), |a| {
self.0.dispatch(a.as_any_ref().downcast_ref().unwrap(), ctx, key, req, rep)
})
}
fn command(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> OrcRes<Option<Expr>> {
with_atom(id.unwrap(), |a| a.remove().dyn_command(ctx))
}
fn drop(&self, AtomCtx(_, id, ctx): AtomCtx) {
with_atom(id.unwrap(), |a| a.remove().dyn_free(ctx))
}
fn serialize(
&self,
AtomCtx(_, id, ctx): AtomCtx<'_>,
write: &mut dyn Write,
) -> Option<Vec<api::ExprTicket>> {
let id = id.unwrap();
id.encode(write);
with_atom(id, |a| a.dyn_serialize(ctx, write))
.map(|v| v.into_iter().map(|t| t.handle.unwrap().tk).collect_vec())
}
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> orchid_api::Atom {
let refs = refs.iter().map(|tk| Expr::new(Arc::new(ExprHandle::from_args(ctx.clone(), *tk))));
let obj = T::deserialize(DeserCtxImpl(data, &ctx), T::Refs::from_iter(refs));
obj._factory().build(ctx)
}
fn print(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> String {
with_atom(id.unwrap(), |a| a.dyn_print(ctx))
}
fn tid(&self) -> TypeId { TypeId::of::<T>() }
fn name(&self) -> &'static str { type_name::<T>() }
fn decode(&self, AtomCtx(data, ..): AtomCtx) -> Box<dyn Any> {
Box::new(<T as AtomCard>::Data::decode(&mut &data[..]))
}
fn call(&self, AtomCtx(_, id, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
with_atom(id.unwrap(), |a| a.remove().dyn_call(ctx, arg))
}
fn call_ref(&self, AtomCtx(_, id, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
with_atom(id.unwrap(), |a| a.dyn_call_ref(ctx, arg))
}
fn handle_req(
&self,
AtomCtx(_, id, ctx): AtomCtx,
key: Sym,
req: &mut dyn Read,
rep: &mut dyn Write,
) -> bool {
with_atom(id.unwrap(), |a| {
self.0.dispatch(a.as_any_ref().downcast_ref().unwrap(), ctx, key, req, rep)
})
}
fn command(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> OrcRes<Option<Expr>> {
with_atom(id.unwrap(), |a| a.remove().dyn_command(ctx))
}
fn drop(&self, AtomCtx(_, id, ctx): AtomCtx) {
with_atom(id.unwrap(), |a| a.remove().dyn_free(ctx))
}
fn serialize(
&self,
AtomCtx(_, id, ctx): AtomCtx<'_>,
write: &mut dyn Write,
) -> Option<Vec<api::ExprTicket>> {
let id = id.unwrap();
id.encode(write);
with_atom(id, |a| a.dyn_serialize(ctx, write))
.map(|v| v.into_iter().map(|t| t.handle.unwrap().tk).collect_vec())
}
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> orchid_api::Atom {
let refs = refs.iter().map(|tk| Expr::new(Arc::new(ExprHandle::from_args(ctx.clone(), *tk))));
let obj = T::deserialize(DeserCtxImpl(data, &ctx), T::Refs::from_iter(refs));
obj._factory().build(ctx)
}
}
pub trait DeserializeCtx: Sized {
fn read<T: Decode>(&mut self) -> T;
fn is_empty(&self) -> bool;
fn assert_empty(self) { assert!(self.is_empty(), "Bytes found after decoding") }
fn decode<T: Decode>(mut self) -> T {
let t = self.read();
self.assert_empty();
t
}
fn sys(&self) -> SysCtx;
fn read<T: Decode>(&mut self) -> T;
fn is_empty(&self) -> bool;
fn assert_empty(self) { assert!(self.is_empty(), "Bytes found after decoding") }
fn decode<T: Decode>(mut self) -> T {
let t = self.read();
self.assert_empty();
t
}
fn sys(&self) -> SysCtx;
}
struct DeserCtxImpl<'a>(&'a [u8], &'a SysCtx);
impl DeserializeCtx for DeserCtxImpl<'_> {
fn read<T: Decode>(&mut self) -> T { T::decode(&mut self.0) }
fn is_empty(&self) -> bool { self.0.is_empty() }
fn sys(&self) -> SysCtx { self.1.clone() }
fn read<T: Decode>(&mut self) -> T { T::decode(&mut self.0) }
fn is_empty(&self) -> bool { self.0.is_empty() }
fn sys(&self) -> SysCtx { self.1.clone() }
}
pub trait RefSet {
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self;
fn to_vec(self) -> Vec<Expr>;
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self;
fn to_vec(self) -> Vec<Expr>;
}
static E_NON_SER: &str = "Never is a stand-in refset for non-serializable atoms";
impl RefSet for Never {
fn from_iter<I>(_: I) -> Self { panic!("{E_NON_SER}") }
fn to_vec(self) -> Vec<Expr> { panic!("{E_NON_SER}") }
fn from_iter<I>(_: I) -> Self { panic!("{E_NON_SER}") }
fn to_vec(self) -> Vec<Expr> { panic!("{E_NON_SER}") }
}
impl RefSet for () {
fn to_vec(self) -> Vec<Expr> { Vec::new() }
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self {
assert_eq!(refs.len(), 0, "Expected no refs")
}
fn to_vec(self) -> Vec<Expr> { Vec::new() }
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self {
assert_eq!(refs.len(), 0, "Expected no refs")
}
}
impl RefSet for Vec<Expr> {
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self { refs.collect_vec() }
fn to_vec(self) -> Vec<Expr> { self }
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self { refs.collect_vec() }
fn to_vec(self) -> Vec<Expr> { self }
}
impl<const N: usize> RefSet for [Expr; N] {
fn to_vec(self) -> Vec<Expr> { self.into_iter().collect_vec() }
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self {
assert_eq!(refs.len(), N, "Wrong number of refs provided");
refs.collect_vec().try_into().unwrap_or_else(|_: Vec<_>| unreachable!())
}
fn to_vec(self) -> Vec<Expr> { self.into_iter().collect_vec() }
fn from_iter<I: Iterator<Item = Expr> + ExactSizeIterator>(refs: I) -> Self {
assert_eq!(refs.len(), N, "Wrong number of refs provided");
refs.collect_vec().try_into().unwrap_or_else(|_: Vec<_>| unreachable!())
}
}
/// Atoms that have a [Drop]
pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Send + Sync + Any + Clone + 'static {
/// If serializable, the collection that best stores subexpression references
/// for this atom.
///
/// - `()` for no subexpressions,
/// - `[Expr; N]` for a static number of subexpressions
/// - `Vec<Expr>` for a variable number of subexpressions
/// - `Never` if not serializable
///
/// If this isn't `Never`, you must override the default, panicking
/// `serialize` and `deserialize` implementations
type Refs: RefSet;
fn val(&self) -> Cow<'_, Self::Data>;
#[allow(unused_variables)]
fn call_ref(&self, arg: ExprHandle) -> Expr { bot([err_not_callable()]) }
fn call(self, arg: ExprHandle) -> Expr {
let ctx = arg.get_ctx();
let gcl = self.call_ref(arg);
self.free(ctx);
gcl
}
#[allow(unused_variables)]
fn command(self, ctx: SysCtx) -> OrcRes<Option<Expr>> { Err(err_not_command().into()) }
#[allow(unused_variables)]
fn free(self, ctx: SysCtx) {}
#[allow(unused_variables)]
fn print(&self, ctx: SysCtx) -> String { format!("OwnedAtom({})", type_name::<Self>()) }
#[allow(unused_variables)]
fn serialize(&self, ctx: SysCtx, write: &mut (impl Write + ?Sized)) -> Self::Refs {
assert!(
TypeId::of::<Self::Refs>() != TypeId::of::<Never>(),
"The extension scaffold is broken, this function should never be called on Never Refs"
);
panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>())
}
#[allow(unused_variables)]
fn deserialize(ctx: impl DeserializeCtx, refs: Self::Refs) -> Self {
assert!(
TypeId::of::<Self::Refs>() != TypeId::of::<Never>(),
"The extension scaffold is broken, this function should never be called on Never Refs"
);
panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>())
}
/// If serializable, the collection that best stores subexpression references
/// for this atom.
///
/// - `()` for no subexpressions,
/// - `[Expr; N]` for a static number of subexpressions
/// - `Vec<Expr>` for a variable number of subexpressions
/// - `Never` if not serializable
///
/// If this isn't `Never`, you must override the default, panicking
/// `serialize` and `deserialize` implementations
type Refs: RefSet;
fn val(&self) -> Cow<'_, Self::Data>;
#[allow(unused_variables)]
fn call_ref(&self, arg: ExprHandle) -> Expr { bot([err_not_callable()]) }
fn call(self, arg: ExprHandle) -> Expr {
let ctx = arg.get_ctx();
let gcl = self.call_ref(arg);
self.free(ctx);
gcl
}
#[allow(unused_variables)]
fn command(self, ctx: SysCtx) -> OrcRes<Option<Expr>> { Err(err_not_command().into()) }
#[allow(unused_variables)]
fn free(self, ctx: SysCtx) {}
#[allow(unused_variables)]
fn print(&self, ctx: SysCtx) -> String { format!("OwnedAtom({})", type_name::<Self>()) }
#[allow(unused_variables)]
fn serialize(&self, ctx: SysCtx, write: &mut (impl Write + ?Sized)) -> Self::Refs {
assert!(
TypeId::of::<Self::Refs>() != TypeId::of::<Never>(),
"The extension scaffold is broken, this function should never be called on Never Refs"
);
panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>())
}
#[allow(unused_variables)]
fn deserialize(ctx: impl DeserializeCtx, refs: Self::Refs) -> Self {
assert!(
TypeId::of::<Self::Refs>() != TypeId::of::<Never>(),
"The extension scaffold is broken, this function should never be called on Never Refs"
);
panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>())
}
}
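// An illustrative sketch, not part of this commit: the smallest serializable owned
// atom, with `Refs = ()` because it captures no subexpressions. `Counter` and its
// `u64` payload are hypothetical, and this assumes `u64` satisfies the `Coding`
// (encode/decode) bounds required of `Atomic::Data`.
#[derive(Clone)]
pub struct Counter(u64);
impl Atomic for Counter {
  type Variant = OwnedVariant;
  type Data = u64;
  fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
}
impl OwnedAtom for Counter {
  // a non-`Never` Refs means both panicking defaults above must be overridden
  type Refs = ();
  fn val(&self) -> Cow<'_, u64> { Cow::Owned(self.0) }
  fn serialize(&self, _ctx: SysCtx, write: &mut (impl Write + ?Sized)) -> Self::Refs {
    // mirror the factory code above: encode the payload into bytes, then write it out
    write.write_all(&enc_vec(&self.0)).expect("writing atom data failed")
  }
  fn deserialize(ctx: impl DeserializeCtx, _refs: Self::Refs) -> Self { Self(ctx.decode()) }
}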
pub trait DynOwnedAtom: Send + Sync + 'static {
fn atom_tid(&self) -> TypeId;
fn as_any_ref(&self) -> &dyn Any;
fn encode(&self, buffer: &mut dyn Write);
fn dyn_call_ref(&self, ctx: SysCtx, arg: api::ExprTicket) -> Expr;
fn dyn_call(self: Box<Self>, ctx: SysCtx, arg: api::ExprTicket) -> Expr;
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> OrcRes<Option<Expr>>;
fn dyn_free(self: Box<Self>, ctx: SysCtx);
fn dyn_print(&self, ctx: SysCtx) -> String;
fn dyn_serialize(&self, ctx: SysCtx, sink: &mut dyn Write) -> Option<Vec<Expr>>;
fn atom_tid(&self) -> TypeId;
fn as_any_ref(&self) -> &dyn Any;
fn encode(&self, buffer: &mut dyn Write);
fn dyn_call_ref(&self, ctx: SysCtx, arg: api::ExprTicket) -> Expr;
fn dyn_call(self: Box<Self>, ctx: SysCtx, arg: api::ExprTicket) -> Expr;
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> OrcRes<Option<Expr>>;
fn dyn_free(self: Box<Self>, ctx: SysCtx);
fn dyn_print(&self, ctx: SysCtx) -> String;
fn dyn_serialize(&self, ctx: SysCtx, sink: &mut dyn Write) -> Option<Vec<Expr>>;
}
impl<T: OwnedAtom> DynOwnedAtom for T {
fn atom_tid(&self) -> TypeId { TypeId::of::<T>() }
fn as_any_ref(&self) -> &dyn Any { self }
fn encode(&self, buffer: &mut dyn Write) { self.val().as_ref().encode(buffer) }
fn dyn_call_ref(&self, ctx: SysCtx, arg: api::ExprTicket) -> Expr {
self.call_ref(ExprHandle::from_args(ctx, arg))
}
fn dyn_call(self: Box<Self>, ctx: SysCtx, arg: api::ExprTicket) -> Expr {
self.call(ExprHandle::from_args(ctx, arg))
}
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> OrcRes<Option<Expr>> { self.command(ctx) }
fn dyn_free(self: Box<Self>, ctx: SysCtx) { self.free(ctx) }
fn dyn_print(&self, ctx: SysCtx) -> String { self.print(ctx) }
fn dyn_serialize(&self, ctx: SysCtx, sink: &mut dyn Write) -> Option<Vec<Expr>> {
(TypeId::of::<Never>() != TypeId::of::<<Self as OwnedAtom>::Refs>())
.then(|| self.serialize(ctx, sink).to_vec())
}
fn atom_tid(&self) -> TypeId { TypeId::of::<T>() }
fn as_any_ref(&self) -> &dyn Any { self }
fn encode(&self, buffer: &mut dyn Write) { self.val().as_ref().encode(buffer) }
fn dyn_call_ref(&self, ctx: SysCtx, arg: api::ExprTicket) -> Expr {
self.call_ref(ExprHandle::from_args(ctx, arg))
}
fn dyn_call(self: Box<Self>, ctx: SysCtx, arg: api::ExprTicket) -> Expr {
self.call(ExprHandle::from_args(ctx, arg))
}
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> OrcRes<Option<Expr>> { self.command(ctx) }
fn dyn_free(self: Box<Self>, ctx: SysCtx) { self.free(ctx) }
fn dyn_print(&self, ctx: SysCtx) -> String { self.print(ctx) }
fn dyn_serialize(&self, ctx: SysCtx, sink: &mut dyn Write) -> Option<Vec<Expr>> {
(TypeId::of::<Never>() != TypeId::of::<<Self as OwnedAtom>::Refs>())
.then(|| self.serialize(ctx, sink).to_vec())
}
}
pub(crate) static OBJ_STORE: IdStore<Box<dyn DynOwnedAtom>> = IdStore::new();

View File

@@ -7,8 +7,8 @@ use orchid_base::name::Sym;
use crate::api;
use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
err_not_callable, err_not_command, get_info,
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
err_not_callable, err_not_command, get_info,
};
use crate::expr::{Expr, ExprHandle, bot};
use crate::system::SysCtx;
@@ -16,65 +16,65 @@ use crate::system::SysCtx;
pub struct ThinVariant;
impl AtomicVariant for ThinVariant {}
impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A {
fn _factory(self) -> AtomFactory {
AtomFactory::new(move |ctx| {
let (id, _) = get_info::<A>(ctx.cted.inst().card());
let mut buf = enc_vec(&id);
self.encode(&mut buf);
api::Atom { drop: None, data: buf, owner: ctx.id }
})
}
fn _info() -> Self::_Info { ThinAtomDynfo(Self::reg_reqs()) }
type _Info = ThinAtomDynfo<Self>;
fn _factory(self) -> AtomFactory {
AtomFactory::new(move |ctx| {
let (id, _) = get_info::<A>(ctx.cted.inst().card());
let mut buf = enc_vec(&id);
self.encode(&mut buf);
api::Atom { drop: None, data: buf, owner: ctx.id }
})
}
fn _info() -> Self::_Info { ThinAtomDynfo(Self::reg_reqs()) }
type _Info = ThinAtomDynfo<Self>;
}
pub struct ThinAtomDynfo<T: ThinAtom>(MethodSet<T>);
impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
fn print(&self, AtomCtx(buf, _, ctx): AtomCtx<'_>) -> String {
T::decode(&mut &buf[..]).print(ctx)
}
fn tid(&self) -> TypeId { TypeId::of::<T>() }
fn name(&self) -> &'static str { type_name::<T>() }
fn decode(&self, AtomCtx(buf, ..): AtomCtx) -> Box<dyn Any> { Box::new(T::decode(&mut &buf[..])) }
fn call(&self, AtomCtx(buf, _, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
T::decode(&mut &buf[..]).call(ExprHandle::from_args(ctx, arg))
}
fn call_ref(&self, AtomCtx(buf, _, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
T::decode(&mut &buf[..]).call(ExprHandle::from_args(ctx, arg))
}
fn handle_req(
&self,
AtomCtx(buf, _, sys): AtomCtx,
key: Sym,
req: &mut dyn std::io::Read,
rep: &mut dyn Write,
) -> bool {
self.0.dispatch(&T::decode(&mut &buf[..]), sys, key, req, rep)
}
fn command(&self, AtomCtx(buf, _, ctx): AtomCtx<'_>) -> OrcRes<Option<Expr>> {
T::decode(&mut &buf[..]).command(ctx)
}
fn serialize(&self, actx: AtomCtx<'_>, write: &mut dyn Write) -> Option<Vec<api::ExprTicket>> {
T::decode(&mut &actx.0[..]).encode(write);
Some(Vec::new())
}
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> api::Atom {
assert!(refs.is_empty(), "Refs found when deserializing thin atom");
T::decode(&mut &data[..])._factory().build(ctx)
}
fn drop(&self, AtomCtx(buf, _, ctx): AtomCtx) {
let string_self = T::decode(&mut &buf[..]).print(ctx.clone());
writeln!(ctx.logger, "Received drop signal for non-drop atom {string_self:?}");
}
fn print(&self, AtomCtx(buf, _, ctx): AtomCtx<'_>) -> String {
T::decode(&mut &buf[..]).print(ctx)
}
fn tid(&self) -> TypeId { TypeId::of::<T>() }
fn name(&self) -> &'static str { type_name::<T>() }
fn decode(&self, AtomCtx(buf, ..): AtomCtx) -> Box<dyn Any> { Box::new(T::decode(&mut &buf[..])) }
fn call(&self, AtomCtx(buf, _, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
T::decode(&mut &buf[..]).call(ExprHandle::from_args(ctx, arg))
}
fn call_ref(&self, AtomCtx(buf, _, ctx): AtomCtx, arg: api::ExprTicket) -> Expr {
T::decode(&mut &buf[..]).call(ExprHandle::from_args(ctx, arg))
}
fn handle_req(
&self,
AtomCtx(buf, _, sys): AtomCtx,
key: Sym,
req: &mut dyn std::io::Read,
rep: &mut dyn Write,
) -> bool {
self.0.dispatch(&T::decode(&mut &buf[..]), sys, key, req, rep)
}
fn command(&self, AtomCtx(buf, _, ctx): AtomCtx<'_>) -> OrcRes<Option<Expr>> {
T::decode(&mut &buf[..]).command(ctx)
}
fn serialize(&self, actx: AtomCtx<'_>, write: &mut dyn Write) -> Option<Vec<api::ExprTicket>> {
T::decode(&mut &actx.0[..]).encode(write);
Some(Vec::new())
}
fn deserialize(&self, ctx: SysCtx, data: &[u8], refs: &[api::ExprTicket]) -> api::Atom {
assert!(refs.is_empty(), "Refs found when deserializing thin atom");
T::decode(&mut &data[..])._factory().build(ctx)
}
fn drop(&self, AtomCtx(buf, _, ctx): AtomCtx) {
let string_self = T::decode(&mut &buf[..]).print(ctx.clone());
writeln!(ctx.logger, "Received drop signal for non-drop atom {string_self:?}");
}
}
pub trait ThinAtom:
AtomCard<Data = Self> + Atomic<Variant = ThinVariant> + Coding + Send + Sync + 'static
AtomCard<Data = Self> + Atomic<Variant = ThinVariant> + Coding + Send + Sync + 'static
{
#[allow(unused_variables)]
fn call(&self, arg: ExprHandle) -> Expr { bot([err_not_callable()]) }
#[allow(unused_variables)]
fn command(&self, ctx: SysCtx) -> OrcRes<Option<Expr>> { Err(err_not_command().into()) }
#[allow(unused_variables)]
fn print(&self, ctx: SysCtx) -> String { format!("ThinAtom({})", type_name::<Self>()) }
#[allow(unused_variables)]
fn call(&self, arg: ExprHandle) -> Expr { bot([err_not_callable()]) }
#[allow(unused_variables)]
fn command(&self, ctx: SysCtx) -> OrcRes<Option<Expr>> { Err(err_not_command().into()) }
#[allow(unused_variables)]
fn print(&self, ctx: SysCtx) -> String { format!("ThinAtom({})", type_name::<Self>()) }
}

View File

@@ -1,58 +1,58 @@
use orchid_base::error::{mk_err, OrcErr, OrcRes};
use orchid_base::error::{OrcErr, OrcRes, mk_err};
use orchid_base::intern;
use orchid_base::location::Pos;
use crate::atom::{AtomicFeatures, ToAtom, TypAtom};
use crate::expr::{atom, bot, Expr};
use crate::expr::{Expr, atom, bot};
use crate::system::downcast_atom;
pub trait TryFromExpr: Sized {
fn try_from_expr(expr: Expr) -> OrcRes<Self>;
fn try_from_expr(expr: Expr) -> OrcRes<Self>;
}
impl TryFromExpr for Expr {
fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr) }
fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr) }
}
impl<T: TryFromExpr, U: TryFromExpr> TryFromExpr for (T, U) {
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Ok((T::try_from_expr(expr.clone())?, U::try_from_expr(expr)?))
}
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Ok((T::try_from_expr(expr.clone())?, U::try_from_expr(expr)?))
}
}
fn err_not_atom(pos: Pos) -> OrcErr {
mk_err(intern!(str: "Expected an atom"), "This expression is not an atom", [pos.into()])
mk_err(intern!(str: "Expected an atom"), "This expression is not an atom", [pos.into()])
}
fn err_type(pos: Pos) -> OrcErr {
mk_err(intern!(str: "Type error"), "The atom is a different type than expected", [pos.into()])
mk_err(intern!(str: "Type error"), "The atom is a different type than expected", [pos.into()])
}
impl<A: AtomicFeatures> TryFromExpr for TypAtom<'_, A> {
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
(expr.foreign_atom())
.map_err(|ex| err_not_atom(ex.pos.clone()).into())
.and_then(|f| downcast_atom(f).map_err(|f| err_type(f.pos).into()))
}
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
(expr.foreign_atom())
.map_err(|ex| err_not_atom(ex.pos.clone()).into())
.and_then(|f| downcast_atom(f).map_err(|f| err_type(f.pos).into()))
}
}
pub trait ToExpr {
fn to_expr(self) -> Expr;
fn to_expr(self) -> Expr;
}
impl ToExpr for Expr {
fn to_expr(self) -> Expr { self }
fn to_expr(self) -> Expr { self }
}
impl<T: ToExpr> ToExpr for OrcRes<T> {
fn to_expr(self) -> Expr {
match self {
Err(e) => bot(e),
Ok(t) => t.to_expr(),
}
}
fn to_expr(self) -> Expr {
match self {
Err(e) => bot(e),
Ok(t) => t.to_expr(),
}
}
}
impl<A: ToAtom> ToExpr for A {
fn to_expr(self) -> Expr { atom(self) }
fn to_expr(self) -> Expr { atom(self) }
}

View File

@@ -6,10 +6,10 @@ use std::{mem, process, thread};
use hashbrown::HashMap;
use itertools::Itertools;
use orchid_api_traits::{enc_vec, Decode, Encode};
use orchid_api_traits::{Decode, Encode, enc_vec};
use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter};
use orchid_base::clone;
use orchid_base::interner::{init_replica, sweep_replica, Tok};
use orchid_base::interner::{Tok, init_replica, sweep_replica};
use orchid_base::logging::Logger;
use orchid_base::macros::{mtreev_from_api, mtreev_to_api};
use orchid_base::name::{PathSlice, Sym};
@@ -22,283 +22,283 @@ use crate::api;
use crate::atom::{AtomCtx, AtomDynfo};
use crate::atom_owned::OBJ_STORE;
use crate::fs::VirtFS;
use crate::lexer::{err_cascade, err_not_applicable, LexContext};
use crate::macros::{apply_rule, RuleCtx};
use crate::lexer::{LexContext, err_cascade, err_not_applicable};
use crate::macros::{RuleCtx, apply_rule};
use crate::msg::{recv_parent_msg, send_parent_msg};
use crate::system::{atom_by_idx, SysCtx};
use crate::system::{SysCtx, atom_by_idx};
use crate::system_ctor::{CtedObj, DynSystemCtor};
use crate::tree::{do_extra, GenTok, GenTokTree, LazyMemberFactory, TIACtxImpl};
use crate::tree::{GenTok, GenTokTree, LazyMemberFactory, TIACtxImpl, do_extra};
pub type ExtReq = RequestHandle<api::ExtMsgSet>;
pub type ExtReqNot = ReqNot<api::ExtMsgSet>;
pub struct ExtensionData {
pub name: &'static str,
pub systems: &'static [&'static dyn DynSystemCtor],
pub name: &'static str,
pub systems: &'static [&'static dyn DynSystemCtor],
}
impl ExtensionData {
pub fn new(name: &'static str, systems: &'static [&'static dyn DynSystemCtor]) -> Self {
Self { name, systems }
}
pub fn main(self) { extension_main(self) }
pub fn new(name: &'static str, systems: &'static [&'static dyn DynSystemCtor]) -> Self {
Self { name, systems }
}
pub fn main(self) { extension_main(self) }
}
pub enum MemberRecord {
Gen(Sym, LazyMemberFactory),
Res,
Gen(Sym, LazyMemberFactory),
Res,
}
pub struct SystemRecord {
cted: CtedObj,
vfses: HashMap<api::VfsId, &'static dyn VirtFS>,
declfs: api::EagerVfs,
lazy_members: HashMap<api::TreeId, MemberRecord>,
cted: CtedObj,
vfses: HashMap<api::VfsId, &'static dyn VirtFS>,
declfs: api::EagerVfs,
lazy_members: HashMap<api::TreeId, MemberRecord>,
}
pub fn with_atom_record<T>(
get_sys_ctx: &impl Fn(api::SysId, ReqNot<api::ExtMsgSet>) -> SysCtx,
reqnot: ReqNot<api::ExtMsgSet>,
atom: &api::Atom,
cb: impl FnOnce(Box<dyn AtomDynfo>, SysCtx, api::AtomId, &[u8]) -> T,
get_sys_ctx: &impl Fn(api::SysId, ReqNot<api::ExtMsgSet>) -> SysCtx,
reqnot: ReqNot<api::ExtMsgSet>,
atom: &api::Atom,
cb: impl FnOnce(Box<dyn AtomDynfo>, SysCtx, api::AtomId, &[u8]) -> T,
) -> T {
let mut data = &atom.data[..];
let ctx = get_sys_ctx(atom.owner, reqnot);
let inst = ctx.cted.inst();
let id = api::AtomId::decode(&mut data);
let atom_record = atom_by_idx(inst.card(), id).expect("Atom ID reserved");
cb(atom_record, ctx, id, data)
let mut data = &atom.data[..];
let ctx = get_sys_ctx(atom.owner, reqnot);
let inst = ctx.cted.inst();
let id = api::AtomId::decode(&mut data);
let atom_record = atom_by_idx(inst.card(), id).expect("Atom ID reserved");
cb(atom_record, ctx, id, data)
}
pub fn extension_main(data: ExtensionData) {
if thread::Builder::new()
.name(format!("ext-main:{}", data.name))
.spawn(|| extension_main_logic(data))
.unwrap()
.join()
.is_err()
{
process::exit(-1)
}
}
fn extension_main_logic(data: ExtensionData) {
let api::HostHeader { log_strategy } = api::HostHeader::decode(&mut std::io::stdin().lock());
let mut buf = Vec::new();
let decls = (data.systems.iter().enumerate())
.map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys))
.map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap())))
.collect_vec();
let systems = Arc::new(Mutex::new(HashMap::<api::SysId, SystemRecord>::new()));
api::ExtensionHeader { name: data.name.to_string(), systems: decls.clone() }.encode(&mut buf);
std::io::stdout().write_all(&buf).unwrap();
std::io::stdout().flush().unwrap();
let exiting = Arc::new(AtomicBool::new(false));
let logger = Arc::new(Logger::new(log_strategy));
let mk_ctx = clone!(logger, systems; move |id: api::SysId, reqnot: ReqNot<api::ExtMsgSet>| {
let cted = systems.lock().unwrap()[&id].cted.clone();
SysCtx { id, cted, logger: logger.clone(), reqnot }
});
let rn = ReqNot::<api::ExtMsgSet>::new(
clone!(logger; move |a, _| {
logger.log_buf("Upsending", a);
send_parent_msg(a).unwrap()
}),
clone!(systems, exiting, mk_ctx; move |n, reqnot| match n {
api::HostExtNotif::Exit => exiting.store(true, Ordering::Relaxed),
api::HostExtNotif::SystemDrop(api::SystemDrop(sys_id)) =>
mem::drop(systems.lock().unwrap().remove(&sys_id)),
api::HostExtNotif::AtomDrop(api::AtomDrop(sys_id, atom)) =>
OBJ_STORE.get(atom.0).unwrap().remove().dyn_free(mk_ctx(sys_id, reqnot)),
}),
clone!(systems, logger; move |hand, req| match req {
api::HostExtReq::Ping(ping@api::Ping) => hand.handle(&ping, &()),
api::HostExtReq::Sweep(sweep@api::Sweep) => hand.handle(&sweep, &sweep_replica()),
api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => {
let i = decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system).unwrap().0;
let cted = data.systems[i].new_system(&new_sys);
let mut vfses = HashMap::new();
let lex_filter = cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
let lxcf = mk_char_filter(lx.char_filter().iter().cloned());
char_filter_union(&cf, &lxcf)
});
let mut lazy_mems = HashMap::new();
let ctx = SysCtx{
cted: cted.clone(),
id: new_sys.id,
logger: logger.clone(),
reqnot: hand.reqnot()
};
let mut tia_ctx = TIACtxImpl{
lazy: &mut lazy_mems,
sys: ctx.clone(),
basepath: &[],
path: Substack::Bottom,
};
let const_root = (cted.inst().dyn_env().into_iter())
.map(|(k, v)| (k.to_api(), v.into_api(&mut tia_ctx)))
.collect();
systems.lock().unwrap().insert(new_sys.id, SystemRecord {
declfs: cted.inst().dyn_vfs().to_api_rec(&mut vfses),
vfses,
cted,
lazy_members: lazy_mems
});
hand.handle(&new_sys, &api::SystemInst {
lex_filter,
const_root,
line_types: vec![]
})
}
api::HostExtReq::GetMember(get_tree@api::GetMember(sys_id, tree_id)) => {
let mut systems_g = systems.lock().unwrap();
let sys = systems_g.get_mut(&sys_id).expect("System not found");
let lazy = &mut sys.lazy_members;
let (path, cb) = match lazy.insert(tree_id, MemberRecord::Res) {
None => panic!("Tree for ID not found"),
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
Some(MemberRecord::Gen(path, cb)) => (path, cb),
};
let tree = cb.build(path.clone());
hand.handle(&get_tree, &tree.into_api(&mut TIACtxImpl{
sys: SysCtx::new(sys_id, &sys.cted, &logger, hand.reqnot()),
path: Substack::Bottom,
basepath: &path,
lazy,
}))
}
api::HostExtReq::VfsReq(api::VfsReq::GetVfs(get_vfs@api::GetVfs(sys_id))) => {
let systems_g = systems.lock().unwrap();
hand.handle(&get_vfs, &systems_g[&sys_id].declfs)
}
api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
let api::SysFwded(sys_id, payload) = fwd;
let ctx = mk_ctx(sys_id, hand.reqnot());
let sys = ctx.cted.inst();
sys.dyn_request(hand, payload)
}
api::HostExtReq::VfsReq(api::VfsReq::VfsRead(vfs_read)) => {
let api::VfsRead(sys_id, vfs_id, path) = &vfs_read;
let systems_g = systems.lock().unwrap();
let path = path.iter().map(|t| Tok::from_api(*t)).collect_vec();
hand.handle(&vfs_read, &systems_g[sys_id].vfses[vfs_id].load(PathSlice::new(&path)))
}
api::HostExtReq::LexExpr(lex @ api::LexExpr{ sys, text, pos, id }) => {
let systems_g = systems.lock().unwrap();
let lexers = systems_g[&sys].cted.inst().dyn_lexers();
mem::drop(systems_g);
let text = Tok::from_api(text);
let ctx = LexContext { sys, id, pos, reqnot: hand.reqnot(), text: &text };
let trigger_char = text.chars().nth(pos as usize).unwrap();
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) {
match lx.lex(&text[pos as usize..], &ctx) {
Err(e) if e.any(|e| *e == err_not_applicable()) => continue,
Err(e) => {
let eopt = e.keep_only(|e| *e != err_cascade()).map(|e| Err(e.to_api()));
return hand.handle(&lex, &eopt)
},
Ok((s, expr)) => {
let ctx = mk_ctx(sys, hand.reqnot());
let expr = expr.to_api(&mut |f, r| do_extra(f, r, ctx.clone()));
let pos = (text.len() - s.len()) as u32;
return hand.handle(&lex, &Some(Ok(api::LexedExpr{ pos, expr })))
}
}
}
writeln!(logger, "Got notified about n/a character '{trigger_char}'");
hand.handle(&lex, &None)
},
api::HostExtReq::ParseLine(pline) => {
let api::ParseLine{ exported, comments, sys, line } = &pline;
let mut ctx = mk_ctx(*sys, hand.reqnot());
let parsers = ctx.cted.inst().dyn_parsers();
let comments = comments.iter().map(Comment::from_api).collect();
let line: Vec<GenTokTree> = ttv_from_api(line, &mut ctx);
let snip = Snippet::new(line.first().expect("Empty line"), &line);
let (head, tail) = snip.pop_front().unwrap();
let name = if let GenTok::Name(n) = &head.tok { n } else { panic!("No line head") };
let parser = parsers.iter().find(|p| p.line_head() == **name).expect("No parser candidate");
let o_line = match parser.parse(*exported, comments, tail) {
Err(e) => Err(e.to_api()),
Ok(t) => Ok(ttv_to_api(t, &mut |f, range| {
api::TokenTree{ range, token: api::Token::Atom(f.clone().build(ctx.clone())) }
})),
};
hand.handle(&pline, &o_line)
}
api::HostExtReq::AtomReq(atom_req) => {
let atom = atom_req.get_atom();
with_atom_record(&mk_ctx, hand.reqnot(), atom, |nfo, ctx, id, buf| {
let actx = AtomCtx(buf, atom.drop, ctx.clone());
match &atom_req {
api::AtomReq::SerializeAtom(ser) => {
let mut buf = enc_vec(&id);
let refs_opt = nfo.serialize(actx, &mut buf);
hand.handle(ser, &refs_opt.map(|refs| (buf, refs)))
}
api::AtomReq::AtomPrint(print@api::AtomPrint(_)) =>
hand.handle(print, &nfo.print(actx)),
api::AtomReq::Fwded(fwded) => {
let api::Fwded(_, key, payload) = &fwded;
let mut reply = Vec::new();
let some = nfo.handle_req(actx, Sym::from_api(*key), &mut &payload[..], &mut reply);
hand.handle(fwded, &some.then_some(reply))
}
api::AtomReq::CallRef(call@api::CallRef(_, arg)) => {
let ret = nfo.call_ref(actx, *arg);
hand.handle(call, &ret.api_return(ctx.clone(), &mut |h| hand.defer_drop(h)))
},
api::AtomReq::FinalCall(call@api::FinalCall(_, arg)) => {
let ret = nfo.call(actx, *arg);
hand.handle(call, &ret.api_return(ctx.clone(), &mut |h| hand.defer_drop(h)))
}
api::AtomReq::Command(cmd@api::Command(_)) => {
hand.handle(cmd, &match nfo.command(actx) {
Err(e) => Err(e.to_api()),
Ok(opt) => Ok(match opt {
None => api::NextStep::Halt,
Some(cont) => api::NextStep::Continue(
cont.api_return(ctx.clone(), &mut |h| hand.defer_drop(h))
),
})
})
}
}
})
},
api::HostExtReq::DeserAtom(deser) => {
let api::DeserAtom(sys, buf, refs) = &deser;
let mut read = &mut &buf[..];
let ctx = mk_ctx(*sys, hand.reqnot());
let id = api::AtomId::decode(&mut read);
let inst = ctx.cted.inst();
let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID");
hand.handle(&deser, &nfo.deserialize(ctx.clone(), read, refs))
},
orchid_api::HostExtReq::ApplyMacro(am) => {
let tok = hand.will_handle_as(&am);
let sys_ctx = mk_ctx(am.sys, hand.reqnot());
let ctx = RuleCtx {
args: (am.params.into_iter())
.map(|(k, v)| (
Tok::from_api(k),
mtreev_from_api(&v, &mut |_| panic!("No atom in macro prompt!"))
))
.collect(),
run_id: am.run_id,
sys: sys_ctx.clone(),
};
hand.handle_as(tok, &match apply_rule(am.id, ctx) {
Err(e) => e.keep_only(|e| *e != err_cascade()).map(|e| Err(e.to_api())),
Ok(t) => Some(Ok(mtreev_to_api(&t, &mut |a| {
api::MacroToken::Atom(a.clone().build(sys_ctx.clone()))
}))),
})
}
}),
);
init_replica(rn.clone().map());
while !exiting.load(Ordering::Relaxed) {
let rcvd = recv_parent_msg().unwrap();
rn.receive(&rcvd)
}
}
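For orientation, a minimal sketch of how an extension binary would drive this entrypoint. The crate path and `MySystemCtor` are assumptions for illustration, not part of this diff:

// Hypothetical extension binary; `MySystemCtor` stands in for a real
// `SystemCtor` implementation and `orchid_extension` for this crate's name.
use orchid_extension::entrypoint::ExtensionData;

static CTOR: MySystemCtor = MySystemCtor;

fn main() {
	// Writes the ExtensionHeader to stdout, then blocks on the stdin/stdout
	// request loop until the host sends HostExtNotif::Exit.
	ExtensionData::new("my-extension", &[&CTOR]).main();
}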

View File

@@ -1,9 +1,8 @@
use std::fmt;
use std::ops::Deref;
use std::sync::{Arc, OnceLock};
use std::{backtrace, fmt};
use derive_destructure::destructure;
use orchid_base::clone;
use orchid_base::error::{OrcErr, OrcErrv};
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
@@ -17,122 +16,122 @@ use crate::system::SysCtx;
#[derive(destructure)]
pub struct ExprHandle {
pub tk: api::ExprTicket,
pub ctx: SysCtx,
}
impl ExprHandle {
pub(crate) fn from_args(ctx: SysCtx, tk: api::ExprTicket) -> Self { Self { ctx, tk } }
pub fn get_ctx(&self) -> SysCtx { self.ctx.clone() }
}
impl fmt::Debug for ExprHandle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "ExprHandle({})", self.tk.0)
}
}
impl Clone for ExprHandle {
fn clone(&self) -> Self {
self.ctx.reqnot.notify(api::Acquire(self.ctx.id, self.tk));
Self { ctx: self.ctx.clone(), tk: self.tk }
}
}
impl Drop for ExprHandle {
fn drop(&mut self) { self.ctx.reqnot.notify(api::Release(self.ctx.id, self.tk)) }
}
#[derive(Clone, Debug, destructure)]
pub struct Expr {
pub handle: Option<Arc<ExprHandle>>,
pub val: OnceLock<ExprData>,
}
impl Expr {
pub fn new(hand: Arc<ExprHandle>) -> Self { Self { handle: Some(hand), val: OnceLock::new() } }
pub fn from_data(val: ExprData) -> Self { Self { handle: None, val: OnceLock::from(val) } }
pub fn get_data(&self) -> &ExprData {
self.val.get_or_init(|| {
let handle = self.handle.as_ref().expect("Either the value or the handle must be set");
let details = handle.ctx.reqnot.request(api::Inspect { target: handle.tk });
let pos = Pos::from_api(&details.location);
let kind = match details.kind {
api::InspectedKind::Atom(a) =>
ExprKind::Atom(ForeignAtom::new(handle.clone(), a, pos.clone())),
api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b)),
api::InspectedKind::Opaque => ExprKind::Opaque,
};
ExprData { pos, kind }
})
}
pub fn foreign_atom(self) -> Result<ForeignAtom<'static>, Self> {
match (self.get_data(), &self.handle) {
(ExprData { kind: ExprKind::Atom(atom), .. }, Some(_)) => Ok(atom.clone()),
_ => Err(self),
}
}
pub fn api_return(
self,
ctx: SysCtx,
do_slot: &mut impl FnMut(Arc<ExprHandle>),
) -> api::Expression {
if let Some(h) = self.handle {
do_slot(h.clone());
api::Expression { location: api::Location::SlotTarget, kind: api::ExpressionKind::Slot(h.tk) }
} else {
self.val.into_inner().expect("Either value or handle must be set").api_return(ctx, do_slot)
}
}
pub fn handle(&self) -> Option<Arc<ExprHandle>> { self.handle.clone() }
}
impl Deref for Expr {
type Target = ExprData;
fn deref(&self) -> &Self::Target { self.get_data() }
}
#[derive(Clone, Debug)]
pub struct ExprData {
pub pos: Pos,
pub kind: ExprKind,
}
impl ExprData {
pub fn api_return(
self,
ctx: SysCtx,
do_slot: &mut impl FnMut(Arc<ExprHandle>),
) -> api::Expression {
api::Expression { location: self.pos.to_api(), kind: self.kind.api_return(ctx, do_slot) }
}
}
#[derive(Clone, Debug)]
pub enum ExprKind {
Call(Box<Expr>, Box<Expr>),
Lambda(u64, Box<Expr>),
Arg(u64),
Seq(Box<Expr>, Box<Expr>),
Const(Tok<Vec<Tok<String>>>),
NewAtom(AtomFactory),
Atom(ForeignAtom<'static>),
Bottom(OrcErrv),
Opaque,
}
impl ExprKind {
pub fn api_return(
self,
ctx: SysCtx,
do_slot: &mut impl FnMut(Arc<ExprHandle>),
) -> api::ExpressionKind {
use api::ExpressionKind as K;
match self {
Self::Call(f, x) =>
K::Call(Box::new(f.api_return(ctx.clone(), do_slot)), Box::new(x.api_return(ctx, do_slot))),
Self::Seq(a, b) =>
K::Seq(Box::new(a.api_return(ctx.clone(), do_slot)), Box::new(b.api_return(ctx, do_slot))),
Self::Lambda(arg, body) => K::Lambda(arg, Box::new(body.api_return(ctx, do_slot))),
Self::Arg(arg) => K::Arg(arg),
Self::Const(name) => K::Const(name.to_api()),
Self::Bottom(err) => K::Bottom(err.to_api()),
Self::NewAtom(fac) => K::NewAtom(fac.clone().build(ctx)),
kind @ (Self::Atom(_) | Self::Opaque) => panic!("{kind:?} should have a token"),
}
}
}
fn inherit(kind: ExprKind) -> Expr { Expr::from_data(ExprData { pos: Pos::Inherit, kind }) }
@@ -140,35 +139,35 @@ pub fn sym_ref(path: Tok<Vec<Tok<String>>>) -> Expr { inherit(ExprKind::Const(pa
pub fn atom<A: ToAtom>(atom: A) -> Expr { inherit(ExprKind::NewAtom(atom.to_atom_factory())) }
pub fn seq(ops: impl IntoIterator<Item = Expr>) -> Expr {
fn recur(mut ops: impl Iterator<Item = Expr>) -> Option<Expr> {
let op = ops.next()?;
Some(match recur(ops) {
None => op,
Some(rec) => inherit(ExprKind::Seq(Box::new(op), Box::new(rec))),
})
}
recur(ops.into_iter()).expect("Empty list provided to seq!")
}
pub fn arg(n: u64) -> Expr { inherit(ExprKind::Arg(n)) }
pub fn lambda(n: u64, b: impl IntoIterator<Item = Expr>) -> Expr {
inherit(ExprKind::Lambda(n, Box::new(call(b))))
}
pub fn call(v: impl IntoIterator<Item = Expr>) -> Expr {
v.into_iter()
.reduce(|f, x| inherit(ExprKind::Call(Box::new(f), Box::new(x))))
.expect("Empty call expression")
}
pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> Expr {
inherit(ExprKind::Bottom(OrcErrv::new(ev).unwrap()))
}
pub fn with<I: TryFromExpr, O: ToExpr>(
expr: Expr,
cont: impl Fn(I) -> O + Clone + Send + Sync + 'static,
) -> Expr {
call([lambda(0, [seq([arg(0), call([Lambda::new(cont).to_expr(), arg(0)])])]), expr])
}
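A quick usage sketch for these constructors; `f`, `x` and `y` are assumed to be `Expr` values already in scope and are not part of this diff. `call` left-folds its operands into nested `Call` nodes while `seq` right-folds into `Seq` nodes:

// Builds Call(Call(f, x), y): apply f to x, then the result to y.
let applied = call([f, x.clone(), y]);
// Builds Seq(x, applied): evaluate x strictly before yielding the application.
let ordered = seq([x, applied]);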

View File

@@ -7,25 +7,25 @@ use orchid_base::name::PathSlice;
use crate::api;
pub trait VirtFS: Send + Sync + 'static {
fn load(&self, path: &PathSlice) -> api::OrcResult<api::Loaded>;
}
pub enum DeclFs {
Lazy(&'static dyn VirtFS),
Mod(&'static [(&'static str, DeclFs)]),
}
impl DeclFs {
pub fn to_api_rec(&self, vfses: &mut HashMap<api::VfsId, &'static dyn VirtFS>) -> api::EagerVfs {
match self {
DeclFs::Lazy(fs) => {
let vfsc: u16 = vfses.len().try_into().expect("too many vfses (more than u16::MAX)");
let id = api::VfsId(NonZero::new(vfsc + 1).unwrap());
vfses.insert(id, *fs);
api::EagerVfs::Lazy(id)
},
DeclFs::Mod(children) => api::EagerVfs::Eager(
children.iter().map(|(k, v)| (intern(*k).to_api(), v.to_api_rec(vfses))).collect(),
),
}
}
}
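As a sketch of how a system could declare its file tree with these types; `MyVirtFS` is a hypothetical `VirtFS` implementation, not part of this diff:

// A lazily resolved backend mounted under "src", next to an eagerly declared,
// empty "vendored" directory.
static SRC_FS: MyVirtFS = MyVirtFS;
static DECL: DeclFs = DeclFs::Mod(&[
	("src", DeclFs::Lazy(&SRC_FS)),
	("vendored", DeclFs::Mod(&[])),
]);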

View File

@@ -19,114 +19,115 @@ use crate::expr::{Expr, ExprHandle};
use crate::system::SysCtx;
trait_set! {
trait FunCB = Fn(Vec<Expr>) -> OrcRes<Expr> + Send + Sync + 'static;
}
pub trait ExprFunc<I, O>: Clone + Send + Sync + 'static {
const ARITY: u8;
fn apply(&self, v: Vec<Expr>) -> OrcRes<Expr>;
}
lazy_static! {
static ref FUNS: Mutex<HashMap<Sym, (u8, Arc<dyn FunCB>)>> = Mutex::default();
}
/// An Atom representing a partially applied named native function. These
/// partial calls are serialized into the name of the native function and the
/// argument list.
///
/// See [Lambda] for the non-serializable variant
#[derive(Clone)]
pub(crate) struct Fun {
path: Sym,
args: Vec<Expr>,
arity: u8,
fun: Arc<dyn FunCB>,
}
impl Fun {
pub fn new<I, O, F: ExprFunc<I, O>>(path: Sym, f: F) -> Self {
let mut fung = FUNS.lock().unwrap();
let fun = if let Some(x) = fung.get(&path) {
x.1.clone()
} else {
let fun = Arc::new(move |v| f.apply(v));
fung.insert(path.clone(), (F::ARITY, fun.clone()));
fun
};
Self { args: vec![], arity: F::ARITY, path, fun }
}
}
impl Atomic for Fun {
type Data = ();
type Variant = OwnedVariant;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
}
impl OwnedAtom for Fun {
type Refs = Vec<Expr>;
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
fn call_ref(&self, arg: ExprHandle) -> Expr {
let new_args = self.args.iter().cloned().chain([Expr::new(Arc::new(arg))]).collect_vec();
if new_args.len() == self.arity.into() {
(self.fun)(new_args).to_expr()
} else {
Self { args: new_args, arity: self.arity, fun: self.fun.clone(), path: self.path.clone() }
.to_expr()
}
}
fn call(self, arg: ExprHandle) -> Expr { self.call_ref(arg) }
fn serialize(&self, _: SysCtx, sink: &mut (impl io::Write + ?Sized)) -> Self::Refs {
self.path.encode(sink);
self.args.clone()
}
fn deserialize(ctx: impl DeserializeCtx, args: Self::Refs) -> Self {
let path = Sym::new(ctx.decode::<Vec<Tok<String>>>()).unwrap();
let (arity, fun) = FUNS.lock().unwrap().get(&path).unwrap().clone();
Self { args, arity, path, fun }
}
}
/// An Atom representing a partially applied native lambda. These are not
/// serializable.
///
/// See [Fun] for the serializable variant
#[derive(Clone)]
pub struct Lambda {
args: Vec<Expr>,
arity: u8,
fun: Arc<dyn FunCB>,
}
impl Lambda {
pub fn new<I, O, F: ExprFunc<I, O>>(f: F) -> Self {
let fun = Arc::new(move |v| f.apply(v));
Self { args: vec![], arity: F::ARITY, fun }
}
}
impl Atomic for Lambda {
type Data = ();
type Variant = OwnedVariant;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
}
impl OwnedAtom for Lambda {
type Refs = Never;
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
fn call_ref(&self, arg: ExprHandle) -> Expr {
let new_args = self.args.iter().cloned().chain([Expr::new(Arc::new(arg))]).collect_vec();
if new_args.len() == self.arity.into() {
(self.fun)(new_args).to_expr()
} else {
Self { args: new_args, arity: self.arity, fun: self.fun.clone() }.to_expr()
}
}
fn call(self, arg: ExprHandle) -> Expr { self.call_ref(arg) }
}
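A usage sketch contrasting the two atoms. The names are placeholders: `MyNum` is a hypothetical wrapper implementing `TryFromExpr` and `ToExpr`, and `path` an already interned `Sym`; the closure shape mirrors how `with` in expr.rs hands a single-argument closure to `Lambda::new`:

// Anonymous, non-serializable continuation.
let double = Lambda::new(|x: MyNum| MyNum(x.0 * 2));
// Named equivalent, serializable as its path plus the captured arguments.
let double_named = Fun::new(path, |x: MyNum| MyNum(x.0 * 2));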
mod expr_func_derives {
use orchid_base::error::OrcRes;
use super::ExprFunc;
use crate::conv::{ToExpr, TryFromExpr};
use crate::func_atom::Expr;
macro_rules! expr_func_derive {
($arity: tt, $($t:ident),*) => {
paste::paste!{
impl<
@@ -144,18 +145,18 @@ mod expr_func_derives {
}
};
}
expr_func_derive!(1, A);
expr_func_derive!(2, A, B);
expr_func_derive!(3, A, B, C);
expr_func_derive!(4, A, B, C, D);
expr_func_derive!(5, A, B, C, D, E);
expr_func_derive!(6, A, B, C, D, E, F);
expr_func_derive!(7, A, B, C, D, E, F, G);
expr_func_derive!(8, A, B, C, D, E, F, G, H);
expr_func_derive!(9, A, B, C, D, E, F, G, H, I);
expr_func_derive!(10, A, B, C, D, E, F, G, H, I, J);
expr_func_derive!(11, A, B, C, D, E, F, G, H, I, J, K);
expr_func_derive!(12, A, B, C, D, E, F, G, H, I, J, K, L);
expr_func_derive!(13, A, B, C, D, E, F, G, H, I, J, K, L, M);
expr_func_derive!(14, A, B, C, D, E, F, G, H, I, J, K, L, M, N);
}

View File

@@ -1,6 +1,6 @@
use std::ops::{Range, RangeInclusive};
use orchid_base::error::{mk_err, OrcErr, OrcRes};
use orchid_base::error::{OrcErr, OrcRes, mk_err};
use orchid_base::intern;
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
@@ -11,58 +11,58 @@ use crate::api;
use crate::tree::{GenTok, GenTokTree};
pub fn err_cascade() -> OrcErr {
mk_err(
intern!(str: "An error cascading from a recursive call"),
"This error should not surface. If you are seeing it, something is wrong",
[Pos::None.into()],
)
}
pub fn err_not_applicable() -> OrcErr {
mk_err(
intern!(str: "Pseudo-error to communicate that the current branch in a dispatch doesn't apply"),
&*err_cascade().message,
[Pos::None.into()],
)
}
pub struct LexContext<'a> {
pub text: &'a Tok<String>,
pub sys: api::SysId,
pub id: api::ParsId,
pub pos: u32,
pub reqnot: ReqNot<api::ExtMsgSet>,
}
impl<'a> LexContext<'a> {
pub fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, GenTokTree<'a>)> {
let start = self.pos(tail);
let lx =
self.reqnot.request(api::SubLex { pos: start, id: self.id }).ok_or_else(err_cascade)?;
Ok((&self.text[lx.pos as usize..], GenTok::Slot(TokHandle::new(lx.ticket)).at(start..lx.pos)))
}
pub fn pos(&self, tail: &'a str) -> u32 { (self.text.len() - tail.len()) as u32 }
pub fn tok_ran(&self, len: u32, tail: &'a str) -> Range<u32> {
self.pos(tail) - len..self.pos(tail)
}
}
pub trait Lexer: Send + Sync + Sized + Default + 'static {
const CHAR_FILTER: &'static [RangeInclusive<char>];
fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)>;
}
pub trait DynLexer: Send + Sync + 'static {
fn char_filter(&self) -> &'static [RangeInclusive<char>];
fn lex<'a>(&self, tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)>;
}
impl<T: Lexer> DynLexer for T {
fn char_filter(&self) -> &'static [RangeInclusive<char>] { T::CHAR_FILTER }
fn lex<'a>(&self, tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
T::lex(tail, ctx)
}
}
pub type LexerObj = &'static dyn DynLexer;

View File

@@ -9,10 +9,10 @@ pub mod expr;
pub mod fs;
pub mod func_atom;
pub mod lexer;
pub mod macros;
pub mod msg;
pub mod other_system;
pub mod parser;
pub mod system;
pub mod system_ctor;
pub mod tree;

View File

@@ -1,101 +1,111 @@
use std::num::NonZero;
use std::sync::RwLock;
use ahash::HashMap;
use lazy_static::lazy_static;
use never::Never;
use orchid_base::{error::OrcRes, interner::{intern, Tok}, location::Pos, macros::{mtreev_from_api, mtreev_to_api, MTree}, parse::Comment, reqnot::Requester};
use orchid_base::error::OrcRes;
use orchid_base::interner::{Tok, intern};
use orchid_base::location::Pos;
use orchid_base::macros::{MTree, mtreev_from_api, mtreev_to_api};
use orchid_base::parse::Comment;
use orchid_base::reqnot::Requester;
use trait_set::trait_set;
use crate::{api, atom::AtomFactory, lexer::err_cascade, system::SysCtx};
use std::{num::NonZero, sync::RwLock};
use crate::api;
use crate::atom::AtomFactory;
use crate::lexer::err_cascade;
use crate::system::SysCtx;
pub trait Macro {
fn pattern() -> MTree<'static, Never>;
fn apply(binds: HashMap<Tok<String>, MTree<'_, Never>>) -> MTree<'_, AtomFactory>;
}
pub trait DynMacro {
fn pattern(&self) -> MTree<'static, Never>;
fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a, Never>>) -> MTree<'a, AtomFactory>;
}
impl<T: Macro> DynMacro for T {
fn pattern(&self) -> MTree<'static, Never> { Self::pattern() }
fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a, Never>>) -> MTree<'a, AtomFactory> {
Self::apply(binds)
}
}
pub struct RuleCtx<'a> {
pub(crate) args: HashMap<Tok<String>, Vec<MTree<'a, Never>>>,
pub(crate) run_id: api::ParsId,
pub(crate) sys: SysCtx,
}
impl<'a> RuleCtx<'a> {
pub fn recurse(&mut self, tree: &[MTree<'a, Never>]) -> OrcRes<Vec<MTree<'a, Never>>> {
let req =
api::RunMacros { run_id: self.run_id, query: mtreev_to_api(tree, &mut |b| match *b {}) };
Ok(mtreev_from_api(&self.sys.reqnot.request(req).ok_or_else(err_cascade)?, &mut |_| {
panic!("Returned atom from Rule recursion")
}))
}
pub fn getv(&mut self, key: &Tok<String>) -> Vec<MTree<'a, Never>> {
self.args.remove(key).expect("Key not found")
}
pub fn gets(&mut self, key: &Tok<String>) -> MTree<'a, Never> {
let v = self.getv(key);
assert!(v.len() == 1, "Not a scalar");
v.into_iter().next().unwrap()
}
pub fn unused_arg<'b>(&mut self, keys: impl IntoIterator<Item = &'b Tok<String>>) {
keys.into_iter().for_each(|k| {
self.getv(k);
});
}
}
trait_set! {
pub trait RuleCB = for<'a> Fn(RuleCtx<'a>) -> OrcRes<Vec<MTree<'a, AtomFactory>>> + Send + Sync;
}
lazy_static! {
static ref RULES: RwLock<HashMap<api::MacroId, Box<dyn RuleCB>>> = RwLock::default();
}
pub struct Rule {
pub(crate) comments: Vec<Comment>,
pub(crate) pattern: Vec<MTree<'static, Never>>,
pub(crate) id: api::MacroId,
}
impl Rule {
pub(crate) fn to_api(&self) -> api::MacroRule {
api::MacroRule {
comments: self.comments.iter().map(|c| c.to_api()).collect(),
location: api::Location::Inherit,
pattern: mtreev_to_api(&self.pattern, &mut |b| match *b {}),
id: self.id,
}
}
}
pub fn rule_cmt<'a>(
cmt: impl IntoIterator<Item = &'a str>,
pattern: Vec<MTree<'static, Never>>,
apply: impl RuleCB + 'static,
) -> Rule {
let mut rules = RULES.write().unwrap();
let id = api::MacroId(NonZero::new(rules.len() as u64 + 1).unwrap());
rules.insert(id, Box::new(apply));
let comments = cmt.into_iter().map(|s| Comment { pos: Pos::Inherit, text: intern(s) }).collect();
Rule { comments, pattern, id }
}
pub fn rule(pattern: Vec<MTree<'static, Never>>, apply: impl RuleCB + 'static) -> Rule {
rule_cmt([], pattern, apply)
}
pub(crate) fn apply_rule(
id: api::MacroId,
ctx: RuleCtx<'static>,
) -> OrcRes<Vec<MTree<'static, AtomFactory>>> {
let rules = RULES.read().unwrap();
rules[&id](ctx)
}

View File

@@ -5,32 +5,32 @@ use crate::api;
use crate::system::{DynSystemCard, SystemCard};
pub struct SystemHandle<C: SystemCard> {
pub(crate) _card: PhantomData<C>,
pub(crate) id: api::SysId,
}
impl<C: SystemCard> SystemHandle<C> {
pub(crate) fn new(id: api::SysId) -> Self { Self { _card: PhantomData, id } }
pub fn id(&self) -> api::SysId { self.id }
}
impl<C: SystemCard> Clone for SystemHandle<C> {
fn clone(&self) -> Self { Self::new(self.id) }
}
pub trait DynSystemHandle {
fn id(&self) -> api::SysId;
fn get_card(&self) -> &dyn DynSystemCard;
}
pub fn leak_card<T: Default>() -> &'static T {
const {
if 0 != size_of::<T>() {
panic!("Attempted to leak positively sized Card. Card types must always be zero-sized");
}
}
Box::leak(Box::default())
}
impl<C: SystemCard> DynSystemHandle for SystemHandle<C> {
fn id(&self) -> api::SysId { self.id }
fn get_card(&self) -> &'static dyn DynSystemCard { leak_card::<C>() }
}

View File

@@ -7,34 +7,34 @@ use crate::tree::GenTokTree;
pub type GenSnippet<'a> = Snippet<'a, 'a, ForeignAtom<'a>, AtomFactory>;
pub trait Parser: Send + Sync + Sized + Default + 'static {
const LINE_HEAD: &'static str;
fn parse(
exported: bool,
comments: Vec<Comment>,
line: GenSnippet<'_>,
) -> OrcRes<Vec<GenTokTree<'_>>>;
}
pub trait DynParser: Send + Sync + 'static {
fn line_head(&self) -> &'static str;
fn parse<'a>(
&self,
exported: bool,
comments: Vec<Comment>,
line: GenSnippet<'a>,
) -> OrcRes<Vec<GenTokTree<'a>>>;
}
impl<T: Parser> DynParser for T {
fn line_head(&self) -> &'static str { Self::LINE_HEAD }
fn parse<'a>(
&self,
exported: bool,
comments: Vec<Comment>,
line: GenSnippet<'a>,
) -> OrcRes<Vec<GenTokTree<'a>>> {
Self::parse(exported, comments, line)
}
}
pub type ParserObj = &'static dyn DynParser;

View File

@@ -10,11 +10,10 @@ use orchid_base::logging::Logger;
use orchid_base::reqnot::{Receipt, ReqNot};
use crate::api;
use crate::atom::{get_info, AtomCtx, AtomDynfo, AtomicFeatures, ForeignAtom, TypAtom};
use crate::atom::{AtomCtx, AtomDynfo, AtomicFeatures, ForeignAtom, TypAtom, get_info};
use crate::entrypoint::ExtReq;
use crate::fs::DeclFs;
use crate::func_atom::Fun;
// use crate::fun::Fun;
use crate::lexer::LexerObj;
use crate::parser::ParserObj;
use crate::system_ctor::{CtedObj, SystemCtor};
@@ -22,118 +21,115 @@ use crate::tree::MemKind;
/// System as consumed by foreign code
pub trait SystemCard: Default + Send + Sync + 'static {
type Ctor: SystemCtor;
type Req: Coding;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>;
}
pub trait DynSystemCard: Send + Sync + 'static {
fn name(&self) -> &'static str;
/// Atoms explicitly defined by the system card. Do not rely on this for
/// querying atoms as it doesn't include the general atom types
fn atoms(&self) -> BoxedIter<Option<Box<dyn AtomDynfo>>>;
}
/// Atoms supported by this package which may appear in all extensions.
/// The indices of these are bitwise negated, such that the MSB of an atom index
/// marks whether it belongs to this package (1) or the importer (0)
fn general_atoms() -> impl Iterator<Item = Option<Box<dyn AtomDynfo>>> {
[Some(Fun::dynfo())].into_iter()
}
pub fn atom_info_for(
sys: &(impl DynSystemCard + ?Sized),
tid: TypeId,
) -> Option<(api::AtomId, Box<dyn AtomDynfo>)> {
(sys.atoms().enumerate().map(|(i, o)| (NonZero::new(i as u64 + 1).unwrap(), o)))
.chain(general_atoms().enumerate().map(|(i, o)| (NonZero::new(!(i as u64)).unwrap(), o)))
.filter_map(|(i, o)| o.map(|a| (api::AtomId(i), a)))
.find(|ent| ent.1.tid() == tid)
}
pub fn atom_by_idx(
sys: &(impl DynSystemCard + ?Sized),
tid: api::AtomId,
) -> Option<Box<dyn AtomDynfo>> {
if (u64::from(tid.0) >> (u64::BITS - 1)) & 1 == 1 {
general_atoms().nth(!u64::from(tid.0) as usize).unwrap()
} else {
sys.atoms().nth(u64::from(tid.0) as usize - 1).unwrap()
}
}
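A worked example of the index split implemented above, consistent with the bitwise negation in atom_info_for: general atoms take ids counting down from u64::MAX, while card-defined atoms count up from 1.

// Position 0 in general_atoms() is stored as !0u64 == u64::MAX; its MSB is set,
// so atom_by_idx routes it back to general_atoms().
assert_eq!(!0u64 >> (u64::BITS - 1), 1);
// The first card-defined atom gets id 1; its MSB is clear, so it is looked up
// in sys.atoms() instead.
assert_eq!(1u64 >> (u64::BITS - 1), 0);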
pub fn resolv_atom(sys: &(impl DynSystemCard + ?Sized), atom: &api::Atom) -> Box<dyn AtomDynfo> {
let tid = api::AtomId::decode(&mut &atom.data[..8]);
atom_by_idx(sys, tid).expect("Value of nonexistent type found")
}
impl<T: SystemCard> DynSystemCard for T {
fn name(&self) -> &'static str { T::Ctor::NAME }
fn atoms(&self) -> BoxedIter<Option<Box<dyn AtomDynfo>>> { Box::new(Self::atoms().into_iter()) }
}
/// System as defined by author
pub trait System: Send + Sync + SystemCard + 'static {
fn env() -> Vec<(Tok<String>, MemKind)>;
fn vfs() -> DeclFs;
fn lexers() -> Vec<LexerObj>;
fn parsers() -> Vec<ParserObj>;
fn request(hand: ExtReq, req: Self::Req) -> Receipt;
}
pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
fn dyn_env(&self) -> HashMap<Tok<String>, MemKind>;
fn dyn_vfs(&self) -> DeclFs;
fn dyn_lexers(&self) -> Vec<LexerObj>;
fn dyn_parsers(&self) -> Vec<ParserObj>;
fn dyn_request(&self, hand: ExtReq, req: Vec<u8>) -> Receipt;
fn card(&self) -> &dyn DynSystemCard;
}
impl<T: System> DynSystem for T {
fn dyn_env(&self) -> HashMap<Tok<String>, MemKind> { Self::env().into_iter().collect() }
fn dyn_vfs(&self) -> DeclFs { Self::vfs() }
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
fn dyn_request(&self, hand: ExtReq, req: Vec<u8>) -> Receipt {
Self::request(hand, <Self as SystemCard>::Req::decode(&mut &req[..]))
}
fn card(&self) -> &dyn DynSystemCard { self }
}
pub fn downcast_atom<A: AtomicFeatures>(foreign: ForeignAtom) -> Result<TypAtom<A>, ForeignAtom> {
let mut data = &foreign.atom.data[..];
let ctx = foreign.ctx.clone();
let info_ent = (ctx.cted.deps().find(|s| s.id() == foreign.atom.owner))
.map(|sys| get_info::<A>(sys.get_card()))
.filter(|(pos, _)| api::AtomId::decode(&mut data) == *pos);
match info_ent {
None => Err(foreign),
Some((_, info)) => {
let val = info.decode(AtomCtx(data, foreign.atom.drop, ctx));
let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type");
Ok(TypAtom { value, data: foreign })
},
}
}
#[derive(Clone)]
pub struct SysCtx {
pub reqnot: ReqNot<api::ExtMsgSet>,
pub id: api::SysId,
pub cted: CtedObj,
pub logger: Arc<Logger>,
}
impl SysCtx {
pub fn new(
id: api::SysId,
cted: &CtedObj,
logger: &Arc<Logger>,
reqnot: ReqNot<api::ExtMsgSet>,
) -> Self {
Self { cted: cted.clone(), id, logger: logger.clone(), reqnot }
}
}

View File

@@ -1,7 +1,7 @@
use std::any::Any;
use std::sync::Arc;
use orchid_base::boxed_iter::{box_empty, box_once, BoxedIter};
use orchid_base::boxed_iter::{BoxedIter, box_empty, box_once};
use ordered_float::NotNan;
use crate::api;
@@ -9,94 +9,94 @@ use crate::other_system::{DynSystemHandle, SystemHandle};
use crate::system::{DynSystem, System, SystemCard};
pub struct Cted<Ctor: SystemCtor + ?Sized> {
pub deps: <Ctor::Deps as DepDef>::Sat,
pub inst: Arc<Ctor::Instance>,
}
impl<C: SystemCtor + ?Sized> Clone for Cted<C> {
fn clone(&self) -> Self { Self { deps: self.deps.clone(), inst: self.inst.clone() } }
}
pub trait DynCted: Send + Sync + 'static {
fn as_any(&self) -> &dyn Any;
fn deps<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>;
fn inst(&self) -> Arc<dyn DynSystem>;
}
impl<C: SystemCtor + ?Sized> DynCted for Cted<C> {
fn as_any(&self) -> &dyn Any { self }
fn deps<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { self.deps.iter() }
fn inst(&self) -> Arc<dyn DynSystem> { self.inst.clone() }
}
pub type CtedObj = Arc<dyn DynCted>;
pub trait DepSat: Clone + Send + Sync + 'static {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>;
}
pub trait DepDef {
type Sat: DepSat;
fn report(names: &mut impl FnMut(&'static str));
fn create(take: &mut impl FnMut() -> api::SysId) -> Self::Sat;
}
impl<T: SystemCard> DepSat for SystemHandle<T> {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { box_once(self) }
}
impl<T: SystemCard> DepDef for T {
type Sat = SystemHandle<Self>;
fn report(names: &mut impl FnMut(&'static str)) { names(T::Ctor::NAME) }
fn create(take: &mut impl FnMut() -> api::SysId) -> Self::Sat { SystemHandle::new(take()) }
}
impl DepSat for () {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> { box_empty() }
}
impl DepDef for () {
type Sat = ();
fn create(_: &mut impl FnMut() -> api::SysId) -> Self::Sat {}
fn report(_: &mut impl FnMut(&'static str)) {}
}
pub trait SystemCtor: Send + Sync + 'static {
type Deps: DepDef;
type Instance: System;
const NAME: &'static str;
const VERSION: f64;
fn inst() -> Option<Self::Instance>;
type Deps: DepDef;
type Instance: System;
const NAME: &'static str;
const VERSION: f64;
fn inst() -> Option<Self::Instance>;
}
pub trait DynSystemCtor: Send + Sync + 'static {
fn decl(&self, id: api::SysDeclId) -> api::SystemDecl;
fn new_system(&self, new: &api::NewSystem) -> CtedObj;
fn decl(&self, id: api::SysDeclId) -> api::SystemDecl;
fn new_system(&self, new: &api::NewSystem) -> CtedObj;
}
impl<T: SystemCtor> DynSystemCtor for T {
fn decl(&self, id: api::SysDeclId) -> api::SystemDecl {
// Version is equivalent to priority for all practical purposes
let priority = NotNan::new(T::VERSION).unwrap();
// aggregate dependency names
let mut depends = Vec::new();
T::Deps::report(&mut |n| depends.push(n.to_string()));
api::SystemDecl { name: T::NAME.to_string(), depends, id, priority }
}
fn new_system(&self, api::NewSystem { system: _, id: _, depends }: &api::NewSystem) -> CtedObj {
let mut ids = depends.iter().copied();
let inst = Arc::new(T::inst().expect("Constructor did not create system"));
let deps = T::Deps::create(&mut || ids.next().unwrap());
Arc::new(Cted::<T> { deps, inst })
}
fn decl(&self, id: api::SysDeclId) -> api::SystemDecl {
// Version is equivalent to priority for all practical purposes
let priority = NotNan::new(T::VERSION).unwrap();
// aggregate dependency names
let mut depends = Vec::new();
T::Deps::report(&mut |n| depends.push(n.to_string()));
api::SystemDecl { name: T::NAME.to_string(), depends, id, priority }
}
fn new_system(&self, api::NewSystem { system: _, id: _, depends }: &api::NewSystem) -> CtedObj {
let mut ids = depends.iter().copied();
let inst = Arc::new(T::inst().expect("Constructor did not create system"));
let deps = T::Deps::create(&mut || ids.next().unwrap());
Arc::new(Cted::<T> { deps, inst })
}
}
mod dep_set_tuple_impls {
use orchid_base::box_chain;
use orchid_base::boxed_iter::BoxedIter;
use paste::paste;
use orchid_base::box_chain;
use orchid_base::boxed_iter::BoxedIter;
use paste::paste;
use super::{DepDef, DepSat};
use crate::api;
use crate::system_ctor::DynSystemHandle;
use super::{DepDef, DepSat};
use crate::api;
use crate::system_ctor::DynSystemHandle;
macro_rules! dep_set_tuple_impl {
macro_rules! dep_set_tuple_impl {
($($name:ident),*) => {
impl<$( $name :DepSat ),*> DepSat for ( $( $name , )* ) {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)> {
@@ -137,20 +137,20 @@ mod dep_set_tuple_impls {
};
}
dep_set_tuple_impl!(A);
dep_set_tuple_impl!(A, B); // 2
dep_set_tuple_impl!(A, B, C);
dep_set_tuple_impl!(A, B, C, D); // 4
dep_set_tuple_impl!(A, B, C, D, E);
dep_set_tuple_impl!(A, B, C, D, E, F);
dep_set_tuple_impl!(A, B, C, D, E, F, G);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H); // 8
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L); // 12
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P); // 16
dep_set_tuple_impl!(A);
dep_set_tuple_impl!(A, B); // 2
dep_set_tuple_impl!(A, B, C);
dep_set_tuple_impl!(A, B, C, D); // 4
dep_set_tuple_impl!(A, B, C, D, E);
dep_set_tuple_impl!(A, B, C, D, E, F);
dep_set_tuple_impl!(A, B, C, D, E, F, G);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H); // 8
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L); // 12
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P); // 16
}
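
A standalone sketch may make the DepDef contract above easier to follow: report announces dependency names in a fixed order, and create later consumes the resolved ids in exactly that order through the take closure, which is how DynSystemCtor::new_system zips the depends list back into a typed value. The types below are simplified stand-ins for api::SysId and SystemHandle, and the hand-written two-element tuple impl merely stands in for whatever dep_set_tuple_impl! generates (an assumption, since the macro body is only partly visible here); none of it is part of the crate.

#[derive(Clone, Copy, Debug)]
struct SysId(u16);

#[derive(Debug)]
struct Handle(SysId);

trait DepDef {
	type Sat;
	fn report(names: &mut impl FnMut(&'static str));
	fn create(take: &mut impl FnMut() -> SysId) -> Self::Sat;
}

struct Lexer;
impl DepDef for Lexer {
	type Sat = Handle;
	fn report(names: &mut impl FnMut(&'static str)) { names("lexer") }
	fn create(take: &mut impl FnMut() -> SysId) -> Self::Sat { Handle(take()) }
}

struct Parser;
impl DepDef for Parser {
	type Sat = Handle;
	fn report(names: &mut impl FnMut(&'static str)) { names("parser") }
	fn create(take: &mut impl FnMut() -> SysId) -> Self::Sat { Handle(take()) }
}

// Hand-written stand-in for a macro-generated two-element tuple impl.
impl<A: DepDef, B: DepDef> DepDef for (A, B) {
	type Sat = (A::Sat, B::Sat);
	fn report(names: &mut impl FnMut(&'static str)) {
		A::report(names);
		B::report(names);
	}
	fn create(take: &mut impl FnMut() -> SysId) -> Self::Sat {
		(A::create(take), B::create(take))
	}
}

fn main() {
	// Declaration time: the host learns the dependency names in order.
	let mut names = Vec::new();
	<(Lexer, Parser) as DepDef>::report(&mut |n| names.push(n));
	assert_eq!(names, ["lexer", "parser"]);

	// Instantiation time: the host hands back one id per reported name and
	// `create` threads them, in order, into the satisfied dependency tuple.
	let mut ids = [SysId(4), SysId(9)].into_iter();
	let (lexer, parser) =
		<(Lexer, Parser) as DepDef>::create(&mut || ids.next().expect("one id per dependency"));
	println!("lexer -> {:?}, parser -> {:?}", lexer.0, parser.0);
}

The order-based zipping is the whole trick: report and create must enumerate dependencies identically, or the ids end up attached to the wrong handles.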

View File

@@ -15,7 +15,7 @@ use trait_set::trait_set;
use crate::api;
use crate::atom::{AtomFactory, ForeignAtom};
use crate::conv::{ToExpr, TryFromExpr};
use crate::conv::ToExpr;
use crate::entrypoint::MemberRecord;
use crate::expr::Expr;
use crate::func_atom::{ExprFunc, Fun};
@@ -26,169 +26,169 @@ pub type GenTokTree<'a> = TokTree<'a, ForeignAtom<'a>, AtomFactory>;
pub type GenTok<'a> = Token<'a, ForeignAtom<'a>, AtomFactory>;
pub fn do_extra(f: &AtomFactory, r: Range<u32>, ctx: SysCtx) -> api::TokenTree {
api::TokenTree { range: r, token: api::Token::Atom(f.clone().build(ctx)) }
api::TokenTree { range: r, token: api::Token::Atom(f.clone().build(ctx)) }
}
fn with_export(mem: GenMember, public: bool) -> Vec<GenItem> {
(public.then(|| GenItemKind::Export(mem.name.clone()).at(Pos::Inherit)).into_iter())
.chain([GenItemKind::Member(mem).at(Pos::Inherit)])
.collect()
(public.then(|| GenItemKind::Export(mem.name.clone()).at(Pos::Inherit)).into_iter())
.chain([GenItemKind::Member(mem).at(Pos::Inherit)])
.collect()
}
pub struct GenItem {
pub kind: GenItemKind,
pub comments: Vec<Comment>,
pub pos: Pos,
pub kind: GenItemKind,
pub comments: Vec<Comment>,
pub pos: Pos,
}
impl GenItem {
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Item {
let kind = match self.kind {
GenItemKind::Export(n) => api::ItemKind::Export(n.to_api()),
GenItemKind::Member(mem) => api::ItemKind::Member(mem.into_api(ctx)),
GenItemKind::Import(cn) => api::ItemKind::Import(cn.tok().to_api()),
GenItemKind::Macro(prio, rules) => api::ItemKind::Macro(api::MacroBlock {
priority: prio,
rules: rules.into_iter().map(|r| r.to_api()).collect_vec(),
}),
};
let comments = self.comments.into_iter().map(|c| c.to_api()).collect_vec();
api::Item { location: self.pos.to_api(), comments, kind }
}
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Item {
let kind = match self.kind {
GenItemKind::Export(n) => api::ItemKind::Export(n.to_api()),
GenItemKind::Member(mem) => api::ItemKind::Member(mem.into_api(ctx)),
GenItemKind::Import(cn) => api::ItemKind::Import(cn.tok().to_api()),
GenItemKind::Macro(prio, rules) => api::ItemKind::Macro(api::MacroBlock {
priority: prio,
rules: rules.into_iter().map(|r| r.to_api()).collect_vec(),
}),
};
let comments = self.comments.into_iter().map(|c| c.to_api()).collect_vec();
api::Item { location: self.pos.to_api(), comments, kind }
}
}
pub fn cnst(public: bool, name: &str, value: impl ToExpr) -> Vec<GenItem> {
with_export(GenMember { name: intern(name), kind: MemKind::Const(value.to_expr()) }, public)
with_export(GenMember { name: intern(name), kind: MemKind::Const(value.to_expr()) }, public)
}
pub fn module(
public: bool,
name: &str,
imports: impl IntoIterator<Item = Sym>,
items: impl IntoIterator<Item = Vec<GenItem>>,
public: bool,
name: &str,
imports: impl IntoIterator<Item = Sym>,
items: impl IntoIterator<Item = Vec<GenItem>>,
) -> Vec<GenItem> {
let (name, kind) = root_mod(name, imports, items);
with_export(GenMember { name, kind }, public)
let (name, kind) = root_mod(name, imports, items);
with_export(GenMember { name, kind }, public)
}
pub fn root_mod(
name: &str,
imports: impl IntoIterator<Item = Sym>,
items: impl IntoIterator<Item = Vec<GenItem>>,
name: &str,
imports: impl IntoIterator<Item = Sym>,
items: impl IntoIterator<Item = Vec<GenItem>>,
) -> (Tok<String>, MemKind) {
let kind = MemKind::Mod {
imports: imports.into_iter().collect(),
items: items.into_iter().flatten().collect(),
};
(intern(name), kind)
let kind = MemKind::Mod {
imports: imports.into_iter().collect(),
items: items.into_iter().flatten().collect(),
};
(intern(name), kind)
}
pub fn fun<I, O>(exported: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenItem> {
let fac = LazyMemberFactory::new(move |sym| MemKind::Const(Fun::new(sym, xf).to_expr()));
with_export(GenMember { name: intern(name), kind: MemKind::Lazy(fac) }, exported)
let fac = LazyMemberFactory::new(move |sym| MemKind::Const(Fun::new(sym, xf).to_expr()));
with_export(GenMember { name: intern(name), kind: MemKind::Lazy(fac) }, exported)
}
pub fn macro_block(prio: Option<f64>, rules: impl IntoIterator<Item = Rule>) -> Vec<GenItem> {
let prio = prio.map(|p| NotNan::new(p).unwrap());
vec![GenItemKind::Macro(prio, rules.into_iter().collect_vec()).gen()]
let prio = prio.map(|p| NotNan::new(p).unwrap());
vec![GenItemKind::Macro(prio, rules.into_iter().collect_vec()).gen()]
}
pub fn comments<'a>(
cmts: impl IntoIterator<Item = &'a str> + Clone,
mut val: Vec<GenItem>,
cmts: impl IntoIterator<Item = &'a str> + Clone,
mut val: Vec<GenItem>,
) -> Vec<GenItem> {
for v in val.iter_mut() {
v.comments
.extend(cmts.clone().into_iter().map(|c| Comment { text: intern(c), pos: Pos::Inherit }));
}
val
for v in val.iter_mut() {
v.comments
.extend(cmts.clone().into_iter().map(|c| Comment { text: intern(c), pos: Pos::Inherit }));
}
val
}
trait_set! {
trait LazyMemberCallback = FnOnce(Sym) -> MemKind + Send + Sync + DynClone
trait LazyMemberCallback = FnOnce(Sym) -> MemKind + Send + Sync + DynClone
}
pub struct LazyMemberFactory(Box<dyn LazyMemberCallback>);
impl LazyMemberFactory {
pub fn new(cb: impl FnOnce(Sym) -> MemKind + Send + Sync + Clone + 'static) -> Self {
Self(Box::new(cb))
}
pub fn build(self, path: Sym) -> MemKind { (self.0)(path) }
pub fn new(cb: impl FnOnce(Sym) -> MemKind + Send + Sync + Clone + 'static) -> Self {
Self(Box::new(cb))
}
pub fn build(self, path: Sym) -> MemKind { (self.0)(path) }
}
impl Clone for LazyMemberFactory {
fn clone(&self) -> Self { Self(clone_box(&*self.0)) }
fn clone(&self) -> Self { Self(clone_box(&*self.0)) }
}
pub enum GenItemKind {
Member(GenMember),
Export(Tok<String>),
Import(Sym),
Macro(Option<NotNan<f64>>, Vec<Rule>),
Member(GenMember),
Export(Tok<String>),
Import(Sym),
Macro(Option<NotNan<f64>>, Vec<Rule>),
}
impl GenItemKind {
pub fn at(self, pos: Pos) -> GenItem { GenItem { kind: self, comments: vec![], pos } }
pub fn gen(self) -> GenItem { GenItem { kind: self, comments: vec![], pos: Pos::Inherit } }
pub fn gen_equiv(self, comments: Vec<Comment>) -> GenItem {
GenItem { kind: self, comments, pos: Pos::Inherit }
}
pub fn at(self, pos: Pos) -> GenItem { GenItem { kind: self, comments: vec![], pos } }
pub fn gen(self) -> GenItem { GenItem { kind: self, comments: vec![], pos: Pos::Inherit } }
pub fn gen_equiv(self, comments: Vec<Comment>) -> GenItem {
GenItem { kind: self, comments, pos: Pos::Inherit }
}
}
pub struct GenMember {
name: Tok<String>,
kind: MemKind,
name: Tok<String>,
kind: MemKind,
}
impl GenMember {
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Member {
api::Member {
name: self.name.to_api(),
kind: self.kind.into_api(&mut ctx.push_path(self.name)),
}
}
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Member {
api::Member {
name: self.name.to_api(),
kind: self.kind.into_api(&mut ctx.push_path(self.name)),
}
}
}
pub enum MemKind {
Const(Expr),
Mod { imports: Vec<Sym>, items: Vec<GenItem> },
Lazy(LazyMemberFactory),
Const(Expr),
Mod { imports: Vec<Sym>, items: Vec<GenItem> },
Lazy(LazyMemberFactory),
}
impl MemKind {
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
match self {
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)),
Self::Const(c) =>
api::MemberKind::Const(c.api_return(ctx.sys(), &mut |_| panic!("Slot found in const tree"))),
Self::Mod { imports, items } => api::MemberKind::Module(api::Module {
items: (imports.into_iter())
.map(|t| GenItemKind::Import(t).gen())
.chain(items)
.map(|i| i.into_api(ctx))
.collect_vec(),
}),
}
}
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
match self {
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)),
Self::Const(c) =>
api::MemberKind::Const(c.api_return(ctx.sys(), &mut |_| panic!("Slot in const tree"))),
Self::Mod { imports, items } => api::MemberKind::Module(api::Module {
items: (imports.into_iter())
.map(|t| GenItemKind::Import(t).gen())
.chain(items)
.map(|i| i.into_api(ctx))
.collect_vec(),
}),
}
}
}
pub trait TreeIntoApiCtx {
fn sys(&self) -> SysCtx;
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId;
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx;
fn sys(&self) -> SysCtx;
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId;
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx;
}
pub struct TIACtxImpl<'a, 'b> {
pub sys: SysCtx,
pub basepath: &'a [Tok<String>],
pub path: Substack<'a, Tok<String>>,
pub lazy: &'b mut HashMap<api::TreeId, MemberRecord>,
pub sys: SysCtx,
pub basepath: &'a [Tok<String>],
pub path: Substack<'a, Tok<String>>,
pub lazy: &'b mut HashMap<api::TreeId, MemberRecord>,
}
impl<'a, 'b> TreeIntoApiCtx for TIACtxImpl<'a, 'b> {
fn sys(&self) -> SysCtx { self.sys.clone() }
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx {
TIACtxImpl {
sys: self.sys.clone(),
lazy: self.lazy,
basepath: self.basepath,
path: self.path.push(seg),
}
}
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId {
let id = api::TreeId(NonZero::new((self.lazy.len() + 2) as u64).unwrap());
let path = Sym::new(self.basepath.iter().cloned().chain(self.path.unreverse())).unwrap();
self.lazy.insert(id, MemberRecord::Gen(path, fac));
id
}
fn sys(&self) -> SysCtx { self.sys.clone() }
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx {
TIACtxImpl {
sys: self.sys.clone(),
lazy: self.lazy,
basepath: self.basepath,
path: self.path.push(seg),
}
}
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId {
let id = api::TreeId(NonZero::new((self.lazy.len() + 2) as u64).unwrap());
let path = Sym::new(self.basepath.iter().cloned().chain(self.path.unreverse())).unwrap();
self.lazy.insert(id, MemberRecord::Gen(path, fac));
id
}
}
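
The Lazy variant above defers building a subtree until the host first asks for it: with_lazy hands out an api::TreeId and records the member's full path, and the factory later runs with that path. Below is a minimal standalone model of the same idea with deliberately simplified types; the real LazyMemberFactory is a cloneable Send + Sync FnOnce over Sym producing a MemKind, and resolution goes through the TreeId registry rather than a string-keyed map.

use std::collections::HashMap;

enum Mem {
	Const(i64),
	Lazy(Box<dyn FnOnce(String) -> Mem>),
}

struct Tree {
	members: HashMap<String, Mem>,
}

impl Tree {
	/// Force the member at `name`, replacing a Lazy entry with whatever it builds.
	fn force(&mut self, name: &str) -> Option<&Mem> {
		if matches!(self.members.get(name), Some(Mem::Lazy(_))) {
			if let Some(Mem::Lazy(factory)) = self.members.remove(name) {
				// The factory receives its own path, like the real callback receives a Sym.
				let built = factory(name.to_string());
				self.members.insert(name.to_string(), built);
			}
		}
		self.members.get(name)
	}
}

fn main() {
	let mut tree = Tree { members: HashMap::new() };
	tree.members.insert("answer".into(), Mem::Const(7));
	tree.members.insert("expensive".into(), Mem::Lazy(Box::new(|path: String| {
		println!("building `{path}` on first access");
		Mem::Const(42)
	})));
	// The factory only runs the first time the path is requested.
	for _ in 0..2 {
		if let Some(Mem::Const(n)) = tree.force("expensive") {
			println!("expensive = {n}");
		}
	}
}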

View File

@@ -9,13 +9,13 @@ edition = "2021"
derive_destructure = "1.0.0"
hashbrown = "0.15.2"
itertools = "0.14.0"
lazy_static = "1.4.0"
lazy_static = "1.5.0"
never = "0.1.0"
num-traits = "0.2.19"
orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "4.2.0"
ordered-float = "4.6.0"
paste = "1.0.15"
substack = "1.1.1"
trait-set = "0.3.0"

View File

@@ -4,41 +4,41 @@ use std::{fmt, io, mem, process};
use orchid_base::msg::{recv_msg, send_msg};
pub struct SharedChild {
child: process::Child,
stdin: Mutex<process::ChildStdin>,
stdout: Mutex<process::ChildStdout>,
debug: Option<(String, Mutex<Box<dyn fmt::Write>>)>,
child: process::Child,
stdin: Mutex<process::ChildStdin>,
stdout: Mutex<process::ChildStdout>,
debug: Option<(String, Mutex<Box<dyn fmt::Write>>)>,
}
impl SharedChild {
pub fn new(
command: &mut process::Command,
debug: Option<(&str, impl fmt::Write + 'static)>,
) -> io::Result<Self> {
let mut child =
command.stdin(process::Stdio::piped()).stdout(process::Stdio::piped()).spawn()?;
let stdin = Mutex::new(child.stdin.take().expect("Piped stdin above"));
let stdout = Mutex::new(child.stdout.take().expect("Piped stdout above"));
let debug = debug.map(|(n, w)| (n.to_string(), Mutex::new(Box::new(w) as Box<dyn fmt::Write>)));
Ok(Self { child, stdin, stdout, debug })
}
pub fn new(
command: &mut process::Command,
debug: Option<(&str, impl fmt::Write + 'static)>,
) -> io::Result<Self> {
let mut child =
command.stdin(process::Stdio::piped()).stdout(process::Stdio::piped()).spawn()?;
let stdin = Mutex::new(child.stdin.take().expect("Piped stdin above"));
let stdout = Mutex::new(child.stdout.take().expect("Piped stdout above"));
let debug = debug.map(|(n, w)| (n.to_string(), Mutex::new(Box::new(w) as Box<dyn fmt::Write>)));
Ok(Self { child, stdin, stdout, debug })
}
pub fn send_msg(&self, msg: &[u8]) -> io::Result<()> {
if let Some((n, dbg)) = &self.debug {
let mut dbg = dbg.lock().unwrap();
writeln!(dbg, "To {n}: {msg:?}").unwrap();
}
send_msg(&mut *self.stdin.lock().unwrap(), msg)
}
pub fn send_msg(&self, msg: &[u8]) -> io::Result<()> {
if let Some((n, dbg)) = &self.debug {
let mut dbg = dbg.lock().unwrap();
writeln!(dbg, "To {n}: {msg:?}").unwrap();
}
send_msg(&mut *self.stdin.lock().unwrap(), msg)
}
pub fn recv_msg(&self) -> io::Result<Vec<u8>> {
let msg = recv_msg(&mut *self.stdout.lock().unwrap());
if let Some((n, dbg)) = &self.debug {
let mut dbg = dbg.lock().unwrap();
writeln!(dbg, "From {n}: {msg:?}").unwrap();
}
msg
}
pub fn recv_msg(&self) -> io::Result<Vec<u8>> {
let msg = recv_msg(&mut *self.stdout.lock().unwrap());
if let Some((n, dbg)) = &self.debug {
let mut dbg = dbg.lock().unwrap();
writeln!(dbg, "From {n}: {msg:?}").unwrap();
}
msg
}
}
impl Drop for SharedChild {
fn drop(&mut self) { mem::drop(self.child.kill()) }
fn drop(&mut self) { mem::drop(self.child.kill()) }
}
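
The pattern above is small enough to sketch in isolation: piped stdio wrapped in Mutexes so any thread can exchange whole messages with the child, and a Drop impl that kills the process. The sketch substitutes newline-delimited strings and the Unix cat command for orchid_base's framed send_msg/recv_msg and a real extension binary, purely for illustration.

use std::io::{self, BufRead, BufReader, Write};
use std::process::{Child, ChildStdin, ChildStdout, Command, Stdio};
use std::sync::Mutex;

struct SharedChild {
	child: Child,
	stdin: Mutex<ChildStdin>,
	stdout: Mutex<BufReader<ChildStdout>>,
}

impl SharedChild {
	fn new(command: &mut Command) -> io::Result<Self> {
		let mut child = command.stdin(Stdio::piped()).stdout(Stdio::piped()).spawn()?;
		let stdin = Mutex::new(child.stdin.take().expect("stdin piped above"));
		let stdout = Mutex::new(BufReader::new(child.stdout.take().expect("stdout piped above")));
		Ok(Self { child, stdin, stdout })
	}
	fn send_msg(&self, msg: &str) -> io::Result<()> {
		// One line per message; the real implementation writes a framed byte buffer.
		writeln!(self.stdin.lock().unwrap(), "{msg}")
	}
	fn recv_msg(&self) -> io::Result<String> {
		let mut line = String::new();
		self.stdout.lock().unwrap().read_line(&mut line)?;
		Ok(line.trim_end().to_string())
	}
}

impl Drop for SharedChild {
	// Best-effort kill, ignoring errors, like the original's mem::drop(self.child.kill()).
	fn drop(&mut self) { let _ = self.child.kill(); }
}

fn main() -> io::Result<()> {
	let child = SharedChild::new(&mut Command::new("cat"))?;
	child.send_msg("ping")?;
	println!("child echoed: {}", child.recv_msg()?);
	Ok(())
}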

View File

@@ -18,123 +18,124 @@ pub type ExprParseCtx = ();
#[derive(Clone, Debug)]
pub struct Expr {
is_canonical: Arc<AtomicBool>,
pos: Pos,
kind: Arc<RwLock<ExprKind>>,
is_canonical: Arc<AtomicBool>,
pos: Pos,
kind: Arc<RwLock<ExprKind>>,
}
impl Expr {
pub fn pos(&self) -> Pos { self.pos.clone() }
pub fn as_atom(&self) -> Option<AtomHand> { todo!() }
pub fn strong_count(&self) -> usize { todo!() }
pub fn id(&self) -> api::ExprTicket {
api::ExprTicket(
NonZeroU64::new(self.kind.as_ref() as *const RwLock<_> as usize as u64)
.expect("this is a ref, it cannot be null"),
)
}
pub fn canonicalize(&self) -> api::ExprTicket {
if !self.is_canonical.swap(true, Ordering::Relaxed) {
KNOWN_EXPRS.write().unwrap().entry(self.id()).or_insert_with(|| self.clone());
}
self.id()
}
pub fn resolve(tk: api::ExprTicket) -> Option<Self> {
KNOWN_EXPRS.read().unwrap().get(&tk).cloned()
}
pub fn from_api(api: &api::Expression, ctx: &mut ExprParseCtx) -> Self {
if let api::ExpressionKind::Slot(tk) = &api.kind {
return Self::resolve(*tk).expect("Invalid slot");
}
Self {
kind: Arc::new(RwLock::new(ExprKind::from_api(&api.kind, ctx))),
is_canonical: Arc::default(),
pos: Pos::from_api(&api.location),
}
}
pub fn to_api(&self) -> api::InspectedKind {
use api::InspectedKind as K;
match &*self.kind.read().unwrap() {
ExprKind::Atom(a) => K::Atom(a.to_api()),
ExprKind::Bottom(b) => K::Bottom(b.to_api()),
_ => K::Opaque,
}
}
pub fn pos(&self) -> Pos { self.pos.clone() }
pub fn as_atom(&self) -> Option<AtomHand> { todo!() }
pub fn strong_count(&self) -> usize { todo!() }
pub fn id(&self) -> api::ExprTicket {
api::ExprTicket(
NonZeroU64::new(self.kind.as_ref() as *const RwLock<_> as usize as u64)
.expect("this is a ref, it cannot be null"),
)
}
pub fn canonicalize(&self) -> api::ExprTicket {
if !self.is_canonical.swap(true, Ordering::Relaxed) {
KNOWN_EXPRS.write().unwrap().entry(self.id()).or_insert_with(|| self.clone());
}
self.id()
}
pub fn resolve(tk: api::ExprTicket) -> Option<Self> {
KNOWN_EXPRS.read().unwrap().get(&tk).cloned()
}
pub fn from_api(api: &api::Expression, ctx: &mut ExprParseCtx) -> Self {
if let api::ExpressionKind::Slot(tk) = &api.kind {
return Self::resolve(*tk).expect("Invalid slot");
}
Self {
kind: Arc::new(RwLock::new(ExprKind::from_api(&api.kind, ctx))),
is_canonical: Arc::default(),
pos: Pos::from_api(&api.location),
}
}
pub fn to_api(&self) -> api::InspectedKind {
use api::InspectedKind as K;
match &*self.kind.read().unwrap() {
ExprKind::Atom(a) => K::Atom(a.to_api()),
ExprKind::Bottom(b) => K::Bottom(b.to_api()),
_ => K::Opaque,
}
}
}
impl Drop for Expr {
fn drop(&mut self) {
// If the only two references left are this and known, remove from known
if Arc::strong_count(&self.kind) == 2 && self.is_canonical.load(Ordering::Relaxed) {
// if known is poisoned, a leak is preferable to a panicking destructor
if let Ok(mut w) = KNOWN_EXPRS.write() {
w.remove(&self.id());
}
}
}
fn drop(&mut self) {
// If the only two references left are this and known, remove from known
if Arc::strong_count(&self.kind) == 2 && self.is_canonical.load(Ordering::Relaxed) {
// if known is poisoned, a leak is preferable to a panicking destructor
if let Ok(mut w) = KNOWN_EXPRS.write() {
w.remove(&self.id());
}
}
}
}
lazy_static! {
static ref KNOWN_EXPRS: RwLock<HashMap<api::ExprTicket, Expr>> = RwLock::default();
static ref KNOWN_EXPRS: RwLock<HashMap<api::ExprTicket, Expr>> = RwLock::default();
}
#[derive(Clone, Debug)]
pub enum ExprKind {
Seq(Expr, Expr),
Call(Expr, Expr),
Atom(AtomHand),
Arg,
Lambda(Option<PathSet>, Expr),
Bottom(OrcErrv),
Const(Sym),
Seq(Expr, Expr),
Call(Expr, Expr),
Atom(AtomHand),
Arg,
Lambda(Option<PathSet>, Expr),
Bottom(OrcErrv),
Const(Sym),
}
impl ExprKind {
pub fn from_api(api: &api::ExpressionKind, ctx: &mut ExprParseCtx) -> Self {
match_mapping!(api, api::ExpressionKind => ExprKind {
Lambda(id => PathSet::from_api(*id, api), b => Expr::from_api(b, ctx)),
Bottom(b => OrcErrv::from_api(b)),
Call(f => Expr::from_api(f, ctx), x => Expr::from_api(x, ctx)),
Const(c => Sym::from_api(*c)),
Seq(a => Expr::from_api(a, ctx), b => Expr::from_api(b, ctx)),
} {
api::ExpressionKind::Arg(_) => ExprKind::Arg,
api::ExpressionKind::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(a.clone())),
api::ExpressionKind::Slot(_) => panic!("Handled in Expr"),
})
}
pub fn from_api(api: &api::ExpressionKind, ctx: &mut ExprParseCtx) -> Self {
match_mapping!(api, api::ExpressionKind => ExprKind {
Lambda(id => PathSet::from_api(*id, api), b => Expr::from_api(b, ctx)),
Bottom(b => OrcErrv::from_api(b)),
Call(f => Expr::from_api(f, ctx), x => Expr::from_api(x, ctx)),
Const(c => Sym::from_api(*c)),
Seq(a => Expr::from_api(a, ctx), b => Expr::from_api(b, ctx)),
} {
api::ExpressionKind::Arg(_) => ExprKind::Arg,
api::ExpressionKind::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(a.clone())),
api::ExpressionKind::Slot(_) => panic!("Handled in Expr"),
})
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum Step {
Left,
Right,
Left,
Right,
}
#[derive(Clone, Debug)]
pub struct PathSet {
/// The single steps through [super::nort::Clause::Apply]
pub steps: VecDeque<Step>,
/// if Some, it splits at a [super::nort::Clause::Apply]. If None, it ends in
/// a [super::nort::Clause::LambdaArg]
pub next: Option<(Box<PathSet>, Box<PathSet>)>,
/// The single steps through [super::nort::Clause::Apply]
pub steps: VecDeque<Step>,
/// if Some, it splits at a [super::nort::Clause::Apply]. If None, it ends in
/// a [super::nort::Clause::LambdaArg]
pub next: Option<(Box<PathSet>, Box<PathSet>)>,
}
impl PathSet {
pub fn after(mut self, step: Step) -> Self {
self.steps.push_front(step);
self
}
pub fn from_api(id: u64, api: &api::ExpressionKind) -> Option<Self> {
use api::ExpressionKind as K;
match &api {
K::Arg(id2) => (id == *id2).then(|| Self { steps: VecDeque::new(), next: None }),
K::Bottom(_) | K::Const(_) | K::NewAtom(_) | K::Slot(_) => None,
K::Lambda(_, b) => Self::from_api(id, &b.kind),
K::Call(l, r) | K::Seq(l, r) =>
match (Self::from_api(id, &l.kind), Self::from_api(id, &r.kind)) {
(Some(a), Some(b)) =>
Some(Self { steps: VecDeque::new(), next: Some((Box::new(a), Box::new(b))) }),
(Some(l), None) => Some(l.after(Step::Left)),
(None, Some(r)) => Some(r.after(Step::Right)),
(None, None) => None,
},
}
}
pub fn after(mut self, step: Step) -> Self {
self.steps.push_front(step);
self
}
pub fn from_api(id: u64, api: &api::ExpressionKind) -> Option<Self> {
use api::ExpressionKind as K;
match &api {
K::Arg(id2) => (id == *id2).then(|| Self { steps: VecDeque::new(), next: None }),
K::Bottom(_) | K::Const(_) | K::NewAtom(_) | K::Slot(_) => None,
K::Lambda(_, b) => Self::from_api(id, &b.kind),
K::Call(l, r) | K::Seq(l, r) => {
match (Self::from_api(id, &l.kind), Self::from_api(id, &r.kind)) {
(Some(a), Some(b)) =>
Some(Self { steps: VecDeque::new(), next: Some((Box::new(a), Box::new(b))) }),
(Some(l), None) => Some(l.after(Step::Left)),
(None, Some(r)) => Some(r.after(Step::Right)),
(None, None) => None,
}
},
}
}
}
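
The ticket scheme above is worth spelling out: an expression's api::ExprTicket is the address of its shared kind cell, canonicalize parks one extra strong reference in KNOWN_EXPRS so a foreign holder of the ticket can resolve it, and Drop evicts the entry once the only remaining references are this handle plus the registry's. The standalone sketch below walks through that lifecycle with simplified stand-ins (a u64 ticket, a string payload, a Mutex'd map in place of the lazy_static RwLock); the eviction ordering in the sketch is arranged so the evicted clone's own Drop never runs while the map lock is held, a detail of this sketch rather than a claim about the original.

use std::collections::HashMap;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex, OnceLock};

fn registry() -> &'static Mutex<HashMap<u64, Expr>> {
	static REG: OnceLock<Mutex<HashMap<u64, Expr>>> = OnceLock::new();
	REG.get_or_init(|| Mutex::new(HashMap::new()))
}

#[derive(Clone)]
struct Expr {
	is_canonical: Arc<AtomicBool>,
	node: Arc<String>, // stands in for Arc<RwLock<ExprKind>>
}

impl Expr {
	fn new(text: &str) -> Self {
		Self { is_canonical: Arc::default(), node: Arc::new(text.to_string()) }
	}
	// The ticket is the address of the shared node: stable for as long as any clone lives.
	fn id(&self) -> u64 { Arc::as_ptr(&self.node) as usize as u64 }
	fn canonicalize(&self) -> u64 {
		// The first call registers one extra strong reference under the ticket.
		if !self.is_canonical.swap(true, Ordering::Relaxed) {
			registry().lock().unwrap().entry(self.id()).or_insert_with(|| self.clone());
		}
		self.id()
	}
	fn resolve(ticket: u64) -> Option<Expr> { registry().lock().unwrap().get(&ticket).cloned() }
}

impl Drop for Expr {
	fn drop(&mut self) {
		// Two refs left means "this handle plus the registry's copy": time to evict.
		if Arc::strong_count(&self.node) == 2 && self.is_canonical.load(Ordering::Relaxed) {
			let evicted = registry().lock().unwrap().remove(&self.id());
			// The lock guard above is released before `evicted` drops, so the evicted
			// clone's own Drop can re-check the (now empty) registry safely.
			drop(evicted);
		}
	}
}

fn main() {
	let e = Expr::new("1 + 1");
	let ticket = e.canonicalize();
	assert!(Expr::resolve(ticket).is_some()); // another party can look the ticket up
	drop(e); // last outside handle gone: the registry entry is evicted
	assert!(Expr::resolve(ticket).is_none());
}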

View File

@@ -32,78 +32,78 @@ use crate::tree::{Member, ParsTokTree};
#[derive(Debug, destructure)]
pub struct AtomData {
owner: System,
drop: Option<api::AtomId>,
data: Vec<u8>,
owner: System,
drop: Option<api::AtomId>,
data: Vec<u8>,
}
impl AtomData {
fn api(self) -> api::Atom {
let (owner, drop, data) = self.destructure();
api::Atom { data, drop, owner: owner.id() }
}
fn api_ref(&self) -> api::Atom {
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
}
fn api(self) -> api::Atom {
let (owner, drop, data) = self.destructure();
api::Atom { data, drop, owner: owner.id() }
}
fn api_ref(&self) -> api::Atom {
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
}
}
impl Drop for AtomData {
fn drop(&mut self) {
if let Some(id) = self.drop {
self.owner.reqnot().notify(api::AtomDrop(self.owner.id(), id))
}
}
fn drop(&mut self) {
if let Some(id) = self.drop {
self.owner.reqnot().notify(api::AtomDrop(self.owner.id(), id))
}
}
}
#[derive(Clone, Debug)]
pub struct AtomHand(Arc<AtomData>);
impl AtomHand {
pub fn from_api(atom: api::Atom) -> Self {
fn create_new(api::Atom { data, drop, owner }: api::Atom) -> AtomHand {
let owner = System::resolve(owner).expect("Atom owned by non-existing system");
AtomHand(Arc::new(AtomData { data, drop, owner }))
}
if let Some(id) = atom.drop {
lazy_static! {
static ref OWNED_ATOMS: Mutex<HashMap<(api::SysId, api::AtomId), Weak<AtomData>>> =
Mutex::default();
}
let owner = atom.owner;
let mut owned_g = OWNED_ATOMS.lock().unwrap();
if let Some(data) = owned_g.get(&(owner, id)) {
if let Some(atom) = data.upgrade() {
return Self(atom);
}
}
let new = create_new(atom);
owned_g.insert((owner, id), Arc::downgrade(&new.0));
new
} else {
create_new(atom)
}
}
pub fn call(self, arg: Expr) -> api::Expression {
let owner_sys = self.0.owner.clone();
let reqnot = owner_sys.reqnot();
let ticket = owner_sys.give_expr(arg.canonicalize(), || arg);
match Arc::try_unwrap(self.0) {
Ok(data) => reqnot.request(api::FinalCall(data.api(), ticket)),
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), ticket)),
}
}
pub fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req))
}
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
pub fn print(&self) -> String { self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())) }
pub fn from_api(atom: api::Atom) -> Self {
fn create_new(api::Atom { data, drop, owner }: api::Atom) -> AtomHand {
let owner = System::resolve(owner).expect("Atom owned by non-existing system");
AtomHand(Arc::new(AtomData { data, drop, owner }))
}
if let Some(id) = atom.drop {
lazy_static! {
static ref OWNED_ATOMS: Mutex<HashMap<(api::SysId, api::AtomId), Weak<AtomData>>> =
Mutex::default();
}
let owner = atom.owner;
let mut owned_g = OWNED_ATOMS.lock().unwrap();
if let Some(data) = owned_g.get(&(owner, id)) {
if let Some(atom) = data.upgrade() {
return Self(atom);
}
}
let new = create_new(atom);
owned_g.insert((owner, id), Arc::downgrade(&new.0));
new
} else {
create_new(atom)
}
}
pub fn call(self, arg: Expr) -> api::Expression {
let owner_sys = self.0.owner.clone();
let reqnot = owner_sys.reqnot();
let ticket = owner_sys.give_expr(arg.canonicalize(), || arg);
match Arc::try_unwrap(self.0) {
Ok(data) => reqnot.request(api::FinalCall(data.api(), ticket)),
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), ticket)),
}
}
pub fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req))
}
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
pub fn print(&self) -> String { self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())) }
}
impl AtomRepr for AtomHand {
type Ctx = ();
fn from_api(atom: &orchid_api::Atom, _: Pos, (): &mut Self::Ctx) -> Self {
Self::from_api(atom.clone())
}
fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
type Ctx = ();
fn from_api(atom: &orchid_api::Atom, _: Pos, (): &mut Self::Ctx) -> Self {
Self::from_api(atom.clone())
}
fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
}
impl fmt::Display for AtomHand {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.print()) }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.print()) }
}
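
For atoms that carry a drop id, from_api above deduplicates handles through a process-wide map of Weak references keyed by (owner, atom id), so the host keeps at most one live AtomData per remote atom and the AtomDrop notification fires only once, when the last handle dies. Below is a standalone sketch of that interning pattern, with a tuple key and a println! standing in for api::SysId/api::AtomId and the reqnot notification.

use std::collections::HashMap;
use std::sync::{Arc, Mutex, OnceLock, Weak};

type Key = (u16, u64); // stands in for (api::SysId, api::AtomId)

struct AtomData {
	key: Key,
	payload: Vec<u8>,
}

impl Drop for AtomData {
	fn drop(&mut self) {
		// Stands in for reqnot.notify(api::AtomDrop(owner, id)).
		println!("dropping atom {:?} ({} bytes), owner notified once", self.key, self.payload.len());
	}
}

fn owned_atoms() -> &'static Mutex<HashMap<Key, Weak<AtomData>>> {
	static MAP: OnceLock<Mutex<HashMap<Key, Weak<AtomData>>>> = OnceLock::new();
	MAP.get_or_init(|| Mutex::new(HashMap::new()))
}

#[derive(Clone)]
struct AtomHand(Arc<AtomData>);

impl AtomHand {
	fn from_wire(key: Key, payload: Vec<u8>) -> Self {
		let mut map = owned_atoms().lock().unwrap();
		// Reuse the existing handle if one is still alive for this (owner, id) pair.
		if let Some(live) = map.get(&key).and_then(Weak::upgrade) {
			return Self(live);
		}
		let fresh = Arc::new(AtomData { key, payload });
		map.insert(key, Arc::downgrade(&fresh));
		Self(fresh)
	}
}

fn main() {
	let a = AtomHand::from_wire((1, 7), vec![1, 2, 3]);
	let b = AtomHand::from_wire((1, 7), vec![1, 2, 3]);
	assert!(Arc::ptr_eq(&a.0, &b.0)); // both handles share one AtomData
	drop(a);
	drop(b); // the Drop message prints here, exactly once
}
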
pub type OnMessage = Box<dyn FnMut(&[u8]) + Send>;
@@ -115,9 +115,9 @@ pub type OnMessage = Box<dyn FnMut(&[u8]) + Send>;
///
/// There are no ordering guarantees about these
pub trait ExtensionPort: Send + Sync {
fn set_onmessage(&self, callback: OnMessage);
fn send(&self, msg: &[u8]);
fn header(&self) -> &api::ExtensionHeader;
fn set_onmessage(&self, callback: OnMessage);
fn send(&self, msg: &[u8]);
fn header(&self) -> &api::ExtensionHeader;
}
/// Data held about an Extension. This is refcounted within [Extension]. It's
@@ -126,323 +126,323 @@ pub trait ExtensionPort: Send + Sync {
/// upgrading fails.
#[derive(destructure)]
pub struct ExtensionData {
port: Arc<dyn ExtensionPort>,
// child: Mutex<process::Child>,
// child_stdin: Mutex<ChildStdin>,
reqnot: ReqNot<api::HostMsgSet>,
systems: Vec<SystemCtor>,
logger: Logger,
port: Arc<dyn ExtensionPort>,
// child: Mutex<process::Child>,
// child_stdin: Mutex<ChildStdin>,
reqnot: ReqNot<api::HostMsgSet>,
systems: Vec<SystemCtor>,
logger: Logger,
}
impl Drop for ExtensionData {
fn drop(&mut self) { self.reqnot.notify(api::HostExtNotif::Exit); }
fn drop(&mut self) { self.reqnot.notify(api::HostExtNotif::Exit); }
}
fn acq_expr(sys: api::SysId, extk: api::ExprTicket) {
(System::resolve(sys).expect("Expr acq'd by invalid system"))
.give_expr(extk, || Expr::resolve(extk).expect("Invalid expr acq'd"));
(System::resolve(sys).expect("Expr acq'd by invalid system"))
.give_expr(extk, || Expr::resolve(extk).expect("Invalid expr acq'd"));
}
fn rel_expr(sys: api::SysId, extk: api::ExprTicket) {
let sys = System::resolve(sys).unwrap();
let mut exprs = sys.0.exprs.write().unwrap();
exprs.entry(extk).and_replace_entry_with(|_, (rc, rt)| {
(0 < rc.fetch_sub(1, Ordering::Relaxed)).then_some((rc, rt))
});
let sys = System::resolve(sys).unwrap();
let mut exprs = sys.0.exprs.write().unwrap();
exprs.entry(extk).and_replace_entry_with(|_, (rc, rt)| {
(0 < rc.fetch_sub(1, Ordering::Relaxed)).then_some((rc, rt))
});
}
#[derive(Clone)]
pub struct Extension(Arc<ExtensionData>);
impl Extension {
pub fn new_process(port: Arc<dyn ExtensionPort>, logger: Logger) -> io::Result<Self> {
let eh = port.header();
let ret = Arc::new_cyclic(|weak: &Weak<ExtensionData>| ExtensionData {
systems: (eh.systems.iter().cloned())
.map(|decl| SystemCtor { decl, ext: weak.clone() })
.collect(),
logger,
port: port.clone(),
reqnot: ReqNot::new(
clone!(weak; move |sfn, _| {
let data = weak.upgrade().unwrap();
data.logger.log_buf("Downsending", sfn);
data.port.send(sfn);
}),
clone!(weak; move |notif, _| match notif {
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => acq_expr(acq.0, acq.1),
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => rel_expr(rel.0, rel.1),
api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => {
acq_expr(mov.inc, mov.expr);
rel_expr(mov.dec, mov.expr);
},
api::ExtHostNotif::Log(api::Log(str)) => weak.upgrade().unwrap().logger.log(str),
}),
|hand, req| match req {
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()),
api::ExtHostReq::IntReq(intreq) => match intreq {
api::IntReq::InternStr(s) => hand.handle(&s, &intern(&**s.0).to_api()),
api::IntReq::InternStrv(v) => hand.handle(&v, &intern(&*v.0).to_api()),
api::IntReq::ExternStr(si) => hand.handle(&si, &Tok::<String>::from_api(si.0).arc()),
api::IntReq::ExternStrv(vi) => hand.handle(
&vi,
&Arc::new(
Tok::<Vec<Tok<String>>>::from_api(vi.0).iter().map(|t| t.to_api()).collect_vec(),
),
),
},
api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => {
let sys = System::resolve(atom.owner).unwrap();
hand.handle(fw, &sys.reqnot().request(api::Fwded(fw.0.clone(), *key, body.clone())))
},
api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => {
let sys = System::resolve(id).unwrap();
hand.handle(fw, &sys.request(body.clone()))
},
api::ExtHostReq::SubLex(sl) => {
let (rep_in, rep_out) = sync_channel(0);
let lex_g = LEX_RECUR.lock().unwrap();
let req_in = lex_g.get(&sl.id).expect("Sublex for nonexistent lexid");
req_in.send(ReqPair(sl.clone(), rep_in)).unwrap();
hand.handle(&sl, &rep_out.recv().unwrap())
},
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(ins @ api::Inspect { target })) => {
let expr = Expr::resolve(target).expect("Invalid ticket");
hand.handle(&ins, &api::Inspected {
refcount: expr.strong_count() as u32,
location: expr.pos().to_api(),
kind: expr.to_api(),
})
},
api::ExtHostReq::RunMacros(ref rm @ api::RunMacros { ref run_id, ref query }) => hand
.handle(
rm,
&macro_recur(
*run_id,
mtreev_from_api(query, &mut |_| panic!("Recursion never contains atoms")),
)
.map(|x| macro_treev_to_api(*run_id, x)),
),
},
),
});
let weak = Arc::downgrade(&ret);
port.set_onmessage(Box::new(move |msg| {
if let Some(xd) = weak.upgrade() {
xd.reqnot.receive(msg)
}
}));
Ok(Self(ret))
}
pub fn systems(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
pub fn new_process(port: Arc<dyn ExtensionPort>, logger: Logger) -> io::Result<Self> {
let eh = port.header();
let ret = Arc::new_cyclic(|weak: &Weak<ExtensionData>| ExtensionData {
systems: (eh.systems.iter().cloned())
.map(|decl| SystemCtor { decl, ext: weak.clone() })
.collect(),
logger,
port: port.clone(),
reqnot: ReqNot::new(
clone!(weak; move |sfn, _| {
let data = weak.upgrade().unwrap();
data.logger.log_buf("Downsending", sfn);
data.port.send(sfn);
}),
clone!(weak; move |notif, _| match notif {
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => acq_expr(acq.0, acq.1),
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => rel_expr(rel.0, rel.1),
api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => {
acq_expr(mov.inc, mov.expr);
rel_expr(mov.dec, mov.expr);
},
api::ExtHostNotif::Log(api::Log(str)) => weak.upgrade().unwrap().logger.log(str),
}),
|hand, req| match req {
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()),
api::ExtHostReq::IntReq(intreq) => match intreq {
api::IntReq::InternStr(s) => hand.handle(&s, &intern(&**s.0).to_api()),
api::IntReq::InternStrv(v) => hand.handle(&v, &intern(&*v.0).to_api()),
api::IntReq::ExternStr(si) => hand.handle(&si, &Tok::<String>::from_api(si.0).arc()),
api::IntReq::ExternStrv(vi) => hand.handle(
&vi,
&Arc::new(
Tok::<Vec<Tok<String>>>::from_api(vi.0).iter().map(|t| t.to_api()).collect_vec(),
),
),
},
api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => {
let sys = System::resolve(atom.owner).unwrap();
hand.handle(fw, &sys.reqnot().request(api::Fwded(fw.0.clone(), *key, body.clone())))
},
api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => {
let sys = System::resolve(id).unwrap();
hand.handle(fw, &sys.request(body.clone()))
},
api::ExtHostReq::SubLex(sl) => {
let (rep_in, rep_out) = sync_channel(0);
let lex_g = LEX_RECUR.lock().unwrap();
let req_in = lex_g.get(&sl.id).expect("Sublex for nonexistent lexid");
req_in.send(ReqPair(sl.clone(), rep_in)).unwrap();
hand.handle(&sl, &rep_out.recv().unwrap())
},
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(ins @ api::Inspect { target })) => {
let expr = Expr::resolve(target).expect("Invalid ticket");
hand.handle(&ins, &api::Inspected {
refcount: expr.strong_count() as u32,
location: expr.pos().to_api(),
kind: expr.to_api(),
})
},
api::ExtHostReq::RunMacros(ref rm @ api::RunMacros { ref run_id, ref query }) => hand
.handle(
rm,
&macro_recur(
*run_id,
mtreev_from_api(query, &mut |_| panic!("Recursion never contains atoms")),
)
.map(|x| macro_treev_to_api(*run_id, x)),
),
},
),
});
let weak = Arc::downgrade(&ret);
port.set_onmessage(Box::new(move |msg| {
if let Some(xd) = weak.upgrade() {
xd.reqnot.receive(msg)
}
}));
Ok(Self(ret))
}
pub fn systems(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
}
pub struct SystemCtor {
decl: api::SystemDecl,
ext: Weak<ExtensionData>,
decl: api::SystemDecl,
ext: Weak<ExtensionData>,
}
impl SystemCtor {
pub fn name(&self) -> &str { &self.decl.name }
pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> {
self.decl.depends.iter().map(|s| &**s)
}
pub fn run<'a>(&self, depends: impl IntoIterator<Item = &'a System>) -> System {
let mut inst_g = SYSTEM_INSTS.write().unwrap();
let depends = depends.into_iter().map(|si| si.id()).collect_vec();
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
static NEXT_ID: AtomicU16 = AtomicU16::new(1);
let id =
api::SysId(NonZero::new(NEXT_ID.fetch_add(1, Ordering::Relaxed)).expect("next_id wrapped"));
let sys_inst = ext.reqnot.request(api::NewSystem { depends, id, system: self.decl.id });
let data = System(Arc::new(SystemInstData {
decl_id: self.decl.id,
ext: Extension(ext),
exprs: RwLock::default(),
lex_filter: sys_inst.lex_filter,
const_root: OnceLock::new(),
line_types: sys_inst.line_types.into_iter().map(Tok::from_api).collect(),
id,
}));
let root = (sys_inst.const_root.into_iter())
.map(|(k, v)| {
Member::from_api(
api::Member { name: k, kind: v },
Substack::Bottom.push(Tok::from_api(k)),
&data,
)
})
.collect_vec();
data.0.const_root.set(root).unwrap();
inst_g.insert(id, data.clone());
data
}
pub fn name(&self) -> &str { &self.decl.name }
pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> {
self.decl.depends.iter().map(|s| &**s)
}
pub fn run<'a>(&self, depends: impl IntoIterator<Item = &'a System>) -> System {
let mut inst_g = SYSTEM_INSTS.write().unwrap();
let depends = depends.into_iter().map(|si| si.id()).collect_vec();
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
static NEXT_ID: AtomicU16 = AtomicU16::new(1);
let id =
api::SysId(NonZero::new(NEXT_ID.fetch_add(1, Ordering::Relaxed)).expect("next_id wrapped"));
let sys_inst = ext.reqnot.request(api::NewSystem { depends, id, system: self.decl.id });
let data = System(Arc::new(SystemInstData {
decl_id: self.decl.id,
ext: Extension(ext),
exprs: RwLock::default(),
lex_filter: sys_inst.lex_filter,
const_root: OnceLock::new(),
line_types: sys_inst.line_types.into_iter().map(Tok::from_api).collect(),
id,
}));
let root = (sys_inst.const_root.into_iter())
.map(|(k, v)| {
Member::from_api(
api::Member { name: k, kind: v },
Substack::Bottom.push(Tok::from_api(k)),
&data,
)
})
.collect_vec();
data.0.const_root.set(root).unwrap();
inst_g.insert(id, data.clone());
data
}
}
lazy_static! {
static ref SYSTEM_INSTS: RwLock<HashMap<api::SysId, System>> = RwLock::default();
static ref LEX_RECUR: Mutex<HashMap<api::ParsId, SyncSender<ReqPair<api::SubLex>>>> =
Mutex::default();
static ref SYSTEM_INSTS: RwLock<HashMap<api::SysId, System>> = RwLock::default();
static ref LEX_RECUR: Mutex<HashMap<api::ParsId, SyncSender<ReqPair<api::SubLex>>>> =
Mutex::default();
}
pub struct ReqPair<R: Request>(R, pub SyncSender<R::Response>);
#[derive(destructure)]
pub struct SystemInstData {
exprs: RwLock<HashMap<api::ExprTicket, (AtomicU32, Expr)>>,
ext: Extension,
decl_id: api::SysDeclId,
lex_filter: api::CharFilter,
id: api::SysId,
const_root: OnceLock<Vec<Member>>,
line_types: Vec<Tok<String>>,
exprs: RwLock<HashMap<api::ExprTicket, (AtomicU32, Expr)>>,
ext: Extension,
decl_id: api::SysDeclId,
lex_filter: api::CharFilter,
id: api::SysId,
const_root: OnceLock<Vec<Member>>,
line_types: Vec<Tok<String>>,
}
impl Drop for SystemInstData {
fn drop(&mut self) {
self.ext.0.reqnot.notify(api::SystemDrop(self.id));
if let Ok(mut g) = SYSTEM_INSTS.write() {
g.remove(&self.id);
}
}
fn drop(&mut self) {
self.ext.0.reqnot.notify(api::SystemDrop(self.id));
if let Ok(mut g) = SYSTEM_INSTS.write() {
g.remove(&self.id);
}
}
}
#[derive(Clone)]
pub struct System(Arc<SystemInstData>);
impl System {
pub fn id(&self) -> api::SysId { self.id }
fn resolve(id: api::SysId) -> Option<System> { SYSTEM_INSTS.read().unwrap().get(&id).cloned() }
fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.ext.0.reqnot }
fn give_expr(&self, ticket: api::ExprTicket, get_expr: impl FnOnce() -> Expr) -> api::ExprTicket {
match self.0.exprs.write().unwrap().entry(ticket) {
Entry::Occupied(mut oe) => {
oe.get_mut().0.fetch_add(1, Ordering::Relaxed);
},
Entry::Vacant(v) => {
v.insert((AtomicU32::new(1), get_expr()));
},
}
ticket
}
pub fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
self.reqnot().request(api::GetMember(self.0.id, id))
}
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
/// Have this system lex a part of the source. It is assumed that
/// [Self::can_lex] was called and returned true.
pub fn lex(
&self,
source: Tok<String>,
pos: u32,
mut r: impl FnMut(u32) -> Option<api::SubLexed> + Send,
) -> api::OrcResult<Option<api::LexedExpr>> {
// get unique lex ID
static LEX_ID: AtomicU64 = AtomicU64::new(1);
let id = api::ParsId(NonZero::new(LEX_ID.fetch_add(1, Ordering::Relaxed)).unwrap());
thread::scope(|s| {
// create and register channel
let (req_in, req_out) = sync_channel(0);
LEX_RECUR.lock().unwrap().insert(id, req_in); // LEX_RECUR released
// spawn recursion handler which will exit when the sender is collected
s.spawn(move || {
while let Ok(ReqPair(sublex, rep_in)) = req_out.recv() {
rep_in.send(r(sublex.pos)).unwrap()
}
});
// Pass control to extension
let ret =
self.reqnot().request(api::LexExpr { id, pos, sys: self.id(), text: source.to_api() });
// collect sender to unblock recursion handler thread before returning
LEX_RECUR.lock().unwrap().remove(&id);
ret.transpose()
}) // exit recursion handler thread
}
pub fn can_parse(&self, line_type: Tok<String>) -> bool { self.line_types.contains(&line_type) }
pub fn line_types(&self) -> impl Iterator<Item = Tok<String>> + '_ {
self.line_types.iter().cloned()
}
pub fn parse(
&self,
line: Vec<ParsTokTree>,
exported: bool,
comments: Vec<Comment>,
) -> OrcRes<Vec<ParsTokTree>> {
let line = line.iter().map(|t| t.to_api(&mut |n, _| match *n {})).collect_vec();
let comments = comments.iter().map(Comment::to_api).collect_vec();
let parsed =
(self.reqnot().request(api::ParseLine { exported, sys: self.id(), comments, line }))
.map_err(|e| OrcErrv::from_api(&e))?;
Ok(ttv_from_api(parsed, &mut ()))
}
pub fn request(&self, req: Vec<u8>) -> Vec<u8> {
self.reqnot().request(api::SysFwded(self.id(), req))
}
pub fn id(&self) -> api::SysId { self.id }
fn resolve(id: api::SysId) -> Option<System> { SYSTEM_INSTS.read().unwrap().get(&id).cloned() }
fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.ext.0.reqnot }
fn give_expr(&self, ticket: api::ExprTicket, get_expr: impl FnOnce() -> Expr) -> api::ExprTicket {
match self.0.exprs.write().unwrap().entry(ticket) {
Entry::Occupied(mut oe) => {
oe.get_mut().0.fetch_add(1, Ordering::Relaxed);
},
Entry::Vacant(v) => {
v.insert((AtomicU32::new(1), get_expr()));
},
}
ticket
}
pub fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
self.reqnot().request(api::GetMember(self.0.id, id))
}
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
/// Have this system lex a part of the source. It is assumed that
/// [Self::can_lex] was called and returned true.
pub fn lex(
&self,
source: Tok<String>,
pos: u32,
mut r: impl FnMut(u32) -> Option<api::SubLexed> + Send,
) -> api::OrcResult<Option<api::LexedExpr>> {
// get unique lex ID
static LEX_ID: AtomicU64 = AtomicU64::new(1);
let id = api::ParsId(NonZero::new(LEX_ID.fetch_add(1, Ordering::Relaxed)).unwrap());
thread::scope(|s| {
// create and register channel
let (req_in, req_out) = sync_channel(0);
LEX_RECUR.lock().unwrap().insert(id, req_in); // LEX_RECUR released
// spawn recursion handler which will exit when the sender is collected
s.spawn(move || {
while let Ok(ReqPair(sublex, rep_in)) = req_out.recv() {
rep_in.send(r(sublex.pos)).unwrap()
}
});
// Pass control to extension
let ret =
self.reqnot().request(api::LexExpr { id, pos, sys: self.id(), text: source.to_api() });
// collect sender to unblock recursion handler thread before returning
LEX_RECUR.lock().unwrap().remove(&id);
ret.transpose()
}) // exit recursion handler thread
}
pub fn can_parse(&self, line_type: Tok<String>) -> bool { self.line_types.contains(&line_type) }
pub fn line_types(&self) -> impl Iterator<Item = Tok<String>> + '_ {
self.line_types.iter().cloned()
}
pub fn parse(
&self,
line: Vec<ParsTokTree>,
exported: bool,
comments: Vec<Comment>,
) -> OrcRes<Vec<ParsTokTree>> {
let line = line.iter().map(|t| t.to_api(&mut |n, _| match *n {})).collect_vec();
let comments = comments.iter().map(Comment::to_api).collect_vec();
let parsed =
(self.reqnot().request(api::ParseLine { exported, sys: self.id(), comments, line }))
.map_err(|e| OrcErrv::from_api(&e))?;
Ok(ttv_from_api(parsed, &mut ()))
}
pub fn request(&self, req: Vec<u8>) -> Vec<u8> {
self.reqnot().request(api::SysFwded(self.id(), req))
}
}
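
System::lex above hides a small handshake: the host registers a rendezvous channel under a fresh ParsId in LEX_RECUR, spawns a scoped thread that services the extension's recursive SubLex callbacks, issues the blocking LexExpr request, and then removes the sender so the helper thread sees a closed queue and exits before the scope returns. The standalone sketch below reproduces just that choreography; plain channels and a closure stand in for the ReqNot plumbing and the extension.

use std::sync::mpsc::{sync_channel, SyncSender};
use std::thread;

// Simplified stand-in for ReqPair<api::SubLex>: a position and a reply channel.
struct ReqPair(u32, SyncSender<String>);

fn main() {
	// In the real code this sender is stored in the global LEX_RECUR map under a fresh ParsId.
	let (req_in, req_out) = sync_channel::<ReqPair>(0);

	thread::scope(|s| {
		// Recursion handler: answers sub-lex requests until every sender is dropped.
		s.spawn(move || {
			while let Ok(ReqPair(pos, reply)) = req_out.recv() {
				reply.send(format!("sub-token at {pos}")).unwrap();
			}
		});

		// "Extension side": while lexing, it calls back into the host twice.
		let extension = |req_in: SyncSender<ReqPair>| {
			let mut parts = Vec::new();
			for pos in [3u32, 9] {
				let (rep_in, rep_out) = sync_channel(0);
				req_in.send(ReqPair(pos, rep_in)).unwrap();
				parts.push(rep_out.recv().unwrap());
			}
			parts.join(" + ")
		};
		let lexed = extension(req_in); // req_in is consumed here, closing the queue
		println!("extension produced: {lexed}");
	}); // the helper thread has exited by the time the scope returns
}
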
impl fmt::Debug for System {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let ctor = (self.0.ext.0.systems.iter().find(|c| c.decl.id == self.0.decl_id))
.expect("System instance with no associated constructor");
write!(f, "System({} @ {} #{}, ", ctor.decl.name, ctor.decl.priority, self.0.id.0)?;
match self.0.exprs.read() {
Err(_) => write!(f, "expressions unavailable"),
Ok(r) => {
let rc: u32 = r.values().map(|v| v.0.load(Ordering::Relaxed)).sum();
write!(f, "{rc} refs to {} exprs", r.len())
},
}
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let ctor = (self.0.ext.0.systems.iter().find(|c| c.decl.id == self.0.decl_id))
.expect("System instance with no associated constructor");
write!(f, "System({} @ {} #{}, ", ctor.decl.name, ctor.decl.priority, self.0.id.0)?;
match self.0.exprs.read() {
Err(_) => write!(f, "expressions unavailable"),
Ok(r) => {
let rc: u32 = r.values().map(|v| v.0.load(Ordering::Relaxed)).sum();
write!(f, "{rc} refs to {} exprs", r.len())
},
}
}
}
impl Deref for System {
type Target = SystemInstData;
fn deref(&self) -> &Self::Target { self.0.as_ref() }
type Target = SystemInstData;
fn deref(&self) -> &Self::Target { self.0.as_ref() }
}
#[derive(Debug, Clone)]
pub enum SysResolvErr {
Loop(Vec<String>),
Missing(String),
Loop(Vec<String>),
Missing(String),
}
pub fn init_systems(tgts: &[String], exts: &[Extension]) -> Result<Vec<System>, SysResolvErr> {
let mut to_load = HashMap::<&str, &SystemCtor>::new();
let mut to_find = tgts.iter().map(|s| s.as_str()).collect::<VecDeque<&str>>();
while let Some(target) = to_find.pop_front() {
if to_load.contains_key(target) {
continue;
}
let ctor = (exts.iter())
.flat_map(|e| e.systems().filter(|c| c.decl.name == target))
.max_by_key(|c| c.decl.priority)
.ok_or_else(|| SysResolvErr::Missing(target.to_string()))?;
to_load.insert(target, ctor);
to_find.extend(ctor.decl.depends.iter().map(|s| s.as_str()));
}
let mut to_load_ordered = Vec::new();
fn walk_deps<'a>(
graph: &mut HashMap<&str, &'a SystemCtor>,
list: &mut Vec<&'a SystemCtor>,
chain: Stackframe<&str>,
) -> Result<(), SysResolvErr> {
if let Some(ctor) = graph.remove(chain.item) {
// if the above is none, the system is already queued. Missing systems are
// detected above
for dep in ctor.decl.depends.iter() {
if Substack::Frame(chain).iter().any(|c| c == dep) {
let mut circle = vec![dep.to_string()];
circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
return Err(SysResolvErr::Loop(circle));
}
walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))?
}
list.push(ctor);
}
Ok(())
}
for tgt in tgts {
walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?;
}
let mut systems = HashMap::<&str, System>::new();
for ctor in to_load_ordered.iter() {
let sys = ctor.run(ctor.depends().map(|n| &systems[n]));
systems.insert(ctor.name(), sys);
}
Ok(systems.into_values().collect_vec())
let mut to_load = HashMap::<&str, &SystemCtor>::new();
let mut to_find = tgts.iter().map(|s| s.as_str()).collect::<VecDeque<&str>>();
while let Some(target) = to_find.pop_front() {
if to_load.contains_key(target) {
continue;
}
let ctor = (exts.iter())
.flat_map(|e| e.systems().filter(|c| c.decl.name == target))
.max_by_key(|c| c.decl.priority)
.ok_or_else(|| SysResolvErr::Missing(target.to_string()))?;
to_load.insert(target, ctor);
to_find.extend(ctor.decl.depends.iter().map(|s| s.as_str()));
}
let mut to_load_ordered = Vec::new();
fn walk_deps<'a>(
graph: &mut HashMap<&str, &'a SystemCtor>,
list: &mut Vec<&'a SystemCtor>,
chain: Stackframe<&str>,
) -> Result<(), SysResolvErr> {
if let Some(ctor) = graph.remove(chain.item) {
// if the above is none, the system is already queued. Missing systems are
// detected above
for dep in ctor.decl.depends.iter() {
if Substack::Frame(chain).iter().any(|c| c == dep) {
let mut circle = vec![dep.to_string()];
circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
return Err(SysResolvErr::Loop(circle));
}
walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))?
}
list.push(ctor);
}
Ok(())
}
for tgt in tgts {
walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?;
}
let mut systems = HashMap::<&str, System>::new();
for ctor in to_load_ordered.iter() {
let sys = ctor.run(ctor.depends().map(|n| &systems[n]));
systems.insert(ctor.name(), sys);
}
Ok(systems.into_values().collect_vec())
}
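
init_systems above amounts to a depth-first ordering pass: dependencies are queued before their dependents, the Substack of names acts as the cycle detector, and missing systems are caught while collecting constructors. Below is a standalone sketch of the same pass over a plain name-to-deps map; for brevity it folds the missing-system check into the walk, which the real code does in a separate collection phase.

use std::collections::HashMap;

#[derive(Debug)]
enum ResolveErr {
	Loop(Vec<String>),
	Missing(String),
}

fn walk(
	graph: &mut HashMap<&str, Vec<&str>>,
	order: &mut Vec<String>,
	chain: &mut Vec<String>,
	name: &str,
) -> Result<(), ResolveErr> {
	// A name that is no longer in the graph has already been queued.
	if let Some(deps) = graph.remove(name) {
		chain.push(name.to_string());
		for dep in deps {
			if chain.iter().any(|c| c == dep) {
				// The dependency chain loops back on itself: report the cycle.
				let mut circle = vec![dep.to_string()];
				circle.extend(chain.iter().rev().cloned());
				return Err(ResolveErr::Loop(circle));
			}
			if !graph.contains_key(dep) && !order.iter().any(|o| o == dep) {
				return Err(ResolveErr::Missing(dep.to_string()));
			}
			walk(graph, order, chain, dep)?;
		}
		chain.pop();
		// All dependencies are queued, so this system may now be loaded.
		order.push(name.to_string());
	}
	Ok(())
}

fn main() -> Result<(), ResolveErr> {
	// "repl" needs "parser", which needs "lexer"; the load order must be reversed.
	let mut graph = HashMap::from([
		("repl", vec!["parser"]),
		("parser", vec!["lexer"]),
		("lexer", vec![]),
	]);
	let mut order = Vec::new();
	walk(&mut graph, &mut order, &mut Vec::new(), "repl")?;
	assert_eq!(order, ["lexer", "parser", "repl"]);
	println!("load order: {order:?}");
	Ok(())
}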

View File

@@ -2,208 +2,208 @@ use std::num::NonZeroU64;
use std::sync::Arc;
use hashbrown::HashMap;
use orchid_base::error::{mk_errv, OrcErrv, OrcRes};
use orchid_base::{intern, match_mapping};
use orchid_base::interner::{intern, Tok};
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::{Tok, intern};
use orchid_base::location::Pos;
use orchid_base::number::{num_to_err, parse_num};
use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
use orchid_base::tokens::PARENS;
use orchid_base::tree::Ph;
use orchid_base::{intern, match_mapping};
use crate::api;
use crate::extension::{AtomHand, System};
use crate::tree::{ParsTok, ParsTokTree};
pub struct LexCtx<'a> {
pub systems: &'a [System],
pub source: &'a Tok<String>,
pub tail: &'a str,
pub sub_trees: &'a mut HashMap<api::TreeTicket, ParsTokTree>,
pub systems: &'a [System],
pub source: &'a Tok<String>,
pub tail: &'a str,
pub sub_trees: &'a mut HashMap<api::TreeTicket, ParsTokTree>,
}
impl<'a> LexCtx<'a> {
pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b>
where 'a: 'b {
LexCtx {
source: self.source,
tail: &self.source[pos as usize..],
systems: self.systems,
sub_trees: &mut *self.sub_trees,
}
}
pub fn get_pos(&self) -> u32 { self.end_pos() - self.tail.len() as u32 }
pub fn end_pos(&self) -> u32 { self.source.len() as u32 }
pub fn set_pos(&mut self, pos: u32) { self.tail = &self.source[pos as usize..] }
pub fn push_pos(&mut self, delta: u32) { self.set_pos(self.get_pos() + delta) }
pub fn set_tail(&mut self, tail: &'a str) { self.tail = tail }
pub fn strip_prefix(&mut self, tgt: &str) -> bool {
if let Some(src) = self.tail.strip_prefix(tgt) {
self.tail = src;
return true;
}
false
}
pub fn add_subtree(&mut self, subtree: ParsTokTree) -> api::TreeTicket {
let next_idx = api::TreeTicket(NonZeroU64::new(self.sub_trees.len() as u64 + 1).unwrap());
self.sub_trees.insert(next_idx, subtree);
next_idx
}
pub fn rm_subtree(&mut self, ticket: api::TreeTicket) -> ParsTokTree {
self.sub_trees.remove(&ticket).unwrap()
}
pub fn strip_char(&mut self, tgt: char) -> bool {
if let Some(src) = self.tail.strip_prefix(tgt) {
self.tail = src;
return true;
}
false
}
pub fn trim(&mut self, filter: impl Fn(char) -> bool) {
self.tail = self.tail.trim_start_matches(filter);
}
pub fn trim_ws(&mut self) { self.trim(|c| c.is_whitespace() && !"\r\n".contains(c)) }
pub fn get_start_matches(&mut self, filter: impl Fn(char) -> bool) -> &'a str {
let rest = self.tail.trim_start_matches(filter);
let matches = &self.tail[..self.tail.len() - rest.len()];
self.tail = rest;
matches
}
pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b>
where 'a: 'b {
LexCtx {
source: self.source,
tail: &self.source[pos as usize..],
systems: self.systems,
sub_trees: &mut *self.sub_trees,
}
}
pub fn get_pos(&self) -> u32 { self.end_pos() - self.tail.len() as u32 }
pub fn end_pos(&self) -> u32 { self.source.len() as u32 }
pub fn set_pos(&mut self, pos: u32) { self.tail = &self.source[pos as usize..] }
pub fn push_pos(&mut self, delta: u32) { self.set_pos(self.get_pos() + delta) }
pub fn set_tail(&mut self, tail: &'a str) { self.tail = tail }
pub fn strip_prefix(&mut self, tgt: &str) -> bool {
if let Some(src) = self.tail.strip_prefix(tgt) {
self.tail = src;
return true;
}
false
}
pub fn add_subtree(&mut self, subtree: ParsTokTree) -> api::TreeTicket {
let next_idx = api::TreeTicket(NonZeroU64::new(self.sub_trees.len() as u64 + 1).unwrap());
self.sub_trees.insert(next_idx, subtree);
next_idx
}
pub fn rm_subtree(&mut self, ticket: api::TreeTicket) -> ParsTokTree {
self.sub_trees.remove(&ticket).unwrap()
}
pub fn strip_char(&mut self, tgt: char) -> bool {
if let Some(src) = self.tail.strip_prefix(tgt) {
self.tail = src;
return true;
}
false
}
pub fn trim(&mut self, filter: impl Fn(char) -> bool) {
self.tail = self.tail.trim_start_matches(filter);
}
pub fn trim_ws(&mut self) { self.trim(|c| c.is_whitespace() && !"\r\n".contains(c)) }
pub fn get_start_matches(&mut self, filter: impl Fn(char) -> bool) -> &'a str {
let rest = self.tail.trim_start_matches(filter);
let matches = &self.tail[..self.tail.len() - rest.len()];
self.tail = rest;
matches
}
}
pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
let start = ctx.get_pos();
assert!(
!ctx.tail.is_empty() && !ctx.tail.starts_with(unrep_space),
"Lexing empty string or whitespace to token!\n\
let start = ctx.get_pos();
assert!(
!ctx.tail.is_empty() && !ctx.tail.starts_with(unrep_space),
"Lexing empty string or whitespace to token!\n\
Invocations of lex_once should check for empty string"
);
let tok = if ctx.strip_prefix("\r\n") || ctx.strip_prefix("\r") || ctx.strip_prefix("\n") {
ParsTok::BR
} else if ctx.strip_prefix("::") {
ParsTok::NS
} else if ctx.strip_prefix("--[") {
let (cmt, tail) = ctx.tail.split_once("]--").ok_or_else(|| {
mk_errv(intern!(str: "Unterminated block comment"), "This block comment has no ending ]--", [
Pos::Range(start..start + 3).into(),
])
})?;
ctx.set_tail(tail);
ParsTok::Comment(Arc::new(cmt.to_string()))
} else if let Some(tail) = ctx.tail.strip_prefix("--").filter(|t| !t.starts_with(op_char)) {
let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1);
ctx.push_pos(end as u32);
ParsTok::Comment(Arc::new(tail[2..end].to_string()))
} else if ctx.strip_char('\\') {
let mut arg = Vec::new();
ctx.trim_ws();
while !ctx.strip_char('.') {
if ctx.tail.is_empty() {
return Err(mk_errv(
intern!(str: "Unclosed lambda"),
"Lambdae started with \\ should separate arguments from body with .",
[Pos::Range(start..start + 1).into()],
));
}
arg.push(lex_once(ctx)?);
ctx.trim_ws();
}
ParsTok::LambdaHead(arg)
} else if let Some((lp, rp, paren)) = PARENS.iter().find(|(lp, ..)| ctx.strip_char(*lp)) {
let mut body = Vec::new();
ctx.trim_ws();
while !ctx.strip_char(*rp) {
if ctx.tail.is_empty() {
return Err(mk_errv(
intern!(str: "unclosed paren"),
format!("this {lp} has no matching {rp}"),
[Pos::Range(start..start + 1).into()],
));
}
body.push(lex_once(ctx)?);
ctx.trim_ws();
}
ParsTok::S(*paren, body)
} else if ctx.strip_prefix("macro") &&
!ctx.tail.chars().next().is_some_and(|x| x.is_ascii_alphabetic())
{
ctx.strip_prefix("macro");
if ctx.strip_char('(') {
let pos = ctx.get_pos();
let numstr = ctx.get_start_matches(|x| x != ')').trim();
let num = parse_num(numstr).map_err(|e| num_to_err(e, pos))?;
ParsTok::Macro(Some(num.to_f64()))
} else {
ParsTok::Macro(None)
}
} else {
for sys in ctx.systems {
let mut errors = Vec::new();
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
let lx =
sys.lex(ctx.source.clone(), ctx.get_pos(), |pos| match lex_once(&mut ctx.push(pos)) {
Ok(t) => Some(api::SubLexed { pos, ticket: ctx.add_subtree(t) }),
Err(e) => {
errors.push(e);
None
},
});
match lx {
Err(e) => return Err(errors.into_iter().fold(OrcErrv::from_api(&e), |a, b| a + b)),
Ok(Some(lexed)) => return Ok(tt_to_owned(&lexed.expr, &mut ctx.push(lexed.pos))),
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
Some(errors) => return Err(errors),
None => continue,
},
}
}
}
if ctx.tail.starts_with(name_start) {
ParsTok::Name(intern(ctx.get_start_matches(name_char)))
} else if ctx.tail.starts_with(op_char) {
ParsTok::Name(intern(ctx.get_start_matches(op_char)))
} else {
return Err(mk_errv(
intern!(str: "Unrecognized character"),
"The following syntax is meaningless.",
[Pos::Range(start..start + 1).into()],
));
}
};
Ok(ParsTokTree { tok, range: start..ctx.get_pos() })
);
let tok = if ctx.strip_prefix("\r\n") || ctx.strip_prefix("\r") || ctx.strip_prefix("\n") {
ParsTok::BR
} else if ctx.strip_prefix("::") {
ParsTok::NS
} else if ctx.strip_prefix("--[") {
let (cmt, tail) = ctx.tail.split_once("]--").ok_or_else(|| {
mk_errv(intern!(str: "Unterminated block comment"), "This block comment has no ending ]--", [
Pos::Range(start..start + 3).into(),
])
})?;
ctx.set_tail(tail);
ParsTok::Comment(Arc::new(cmt.to_string()))
} else if let Some(tail) = ctx.tail.strip_prefix("--").filter(|t| !t.starts_with(op_char)) {
let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1);
ctx.push_pos(end as u32);
ParsTok::Comment(Arc::new(tail[2..end].to_string()))
} else if ctx.strip_char('\\') {
let mut arg = Vec::new();
ctx.trim_ws();
while !ctx.strip_char('.') {
if ctx.tail.is_empty() {
return Err(mk_errv(
intern!(str: "Unclosed lambda"),
"Lambdae started with \\ should separate arguments from body with .",
[Pos::Range(start..start + 1).into()],
));
}
arg.push(lex_once(ctx)?);
ctx.trim_ws();
}
ParsTok::LambdaHead(arg)
} else if let Some((lp, rp, paren)) = PARENS.iter().find(|(lp, ..)| ctx.strip_char(*lp)) {
let mut body = Vec::new();
ctx.trim_ws();
while !ctx.strip_char(*rp) {
if ctx.tail.is_empty() {
return Err(mk_errv(
intern!(str: "unclosed paren"),
format!("this {lp} has no matching {rp}"),
[Pos::Range(start..start + 1).into()],
));
}
body.push(lex_once(ctx)?);
ctx.trim_ws();
}
ParsTok::S(*paren, body)
} else if ctx.strip_prefix("macro")
&& !ctx.tail.chars().next().is_some_and(|x| x.is_ascii_alphabetic())
{
ctx.strip_prefix("macro");
if ctx.strip_char('(') {
let pos = ctx.get_pos();
let numstr = ctx.get_start_matches(|x| x != ')').trim();
let num = parse_num(numstr).map_err(|e| num_to_err(e, pos))?;
ParsTok::Macro(Some(num.to_f64()))
} else {
ParsTok::Macro(None)
}
} else {
for sys in ctx.systems {
let mut errors = Vec::new();
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
let lx =
sys.lex(ctx.source.clone(), ctx.get_pos(), |pos| match lex_once(&mut ctx.push(pos)) {
Ok(t) => Some(api::SubLexed { pos, ticket: ctx.add_subtree(t) }),
Err(e) => {
errors.push(e);
None
},
});
match lx {
Err(e) => return Err(errors.into_iter().fold(OrcErrv::from_api(&e), |a, b| a + b)),
Ok(Some(lexed)) => return Ok(tt_to_owned(&lexed.expr, &mut ctx.push(lexed.pos))),
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
Some(errors) => return Err(errors),
None => continue,
},
}
}
}
if ctx.tail.starts_with(name_start) {
ParsTok::Name(intern(ctx.get_start_matches(name_char)))
} else if ctx.tail.starts_with(op_char) {
ParsTok::Name(intern(ctx.get_start_matches(op_char)))
} else {
return Err(mk_errv(
intern!(str: "Unrecognized character"),
"The following syntax is meaningless.",
[Pos::Range(start..start + 1).into()],
));
}
};
Ok(ParsTokTree { tok, range: start..ctx.get_pos() })
}
fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree {
let tok = match_mapping!(&api.token, api::Token => ParsTok {
Atom(atom => AtomHand::from_api(atom.clone())),
Bottom(err => OrcErrv::from_api(err)),
LambdaHead(arg => ttv_to_owned(arg, ctx)),
Name(name => Tok::from_api(*name)),
S(p.clone(), b.iter().map(|t| tt_to_owned(t, ctx)).collect()),
BR, NS,
Comment(c.clone()),
Ph(ph => Ph::from_api(ph)),
Macro(*prio),
} {
api::Token::Slot(id) => return ctx.rm_subtree(*id),
});
ParsTokTree { range: api.range.clone(), tok }
}
fn ttv_to_owned<'a>(
api: impl IntoIterator<Item = &'a api::TokenTree>,
ctx: &mut LexCtx<'_>,
) -> Vec<ParsTokTree> {
api.into_iter().map(|t| tt_to_owned(t, ctx)).collect()
}
pub fn lex(text: Tok<String>, systems: &[System]) -> OrcRes<Vec<ParsTokTree>> {
let mut sub_trees = HashMap::new();
let mut ctx = LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems };
let mut tokv = Vec::new();
ctx.trim(unrep_space);
while !ctx.tail.is_empty() {
tokv.push(lex_once(&mut ctx)?);
ctx.trim(unrep_space);
}
Ok(tokv)
}
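The lexer above consumes its input by repeatedly peeling a recognised prefix off ctx.tail; get_start_matches is the core of that idiom. A minimal, std-only sketch of the same trick, with illustrative names and not part of this commit:

fn take_while_prefix<'a>(tail: &mut &'a str, filter: impl Fn(char) -> bool) -> &'a str {
	// Longest prefix whose chars satisfy `filter`: compare lengths before and
	// after trim_start_matches, then advance the cursor past it.
	let rest = tail.trim_start_matches(filter);
	let matched = &tail[..tail.len() - rest.len()];
	*tail = rest;
	matched
}

fn main() {
	let mut tail = "foo_bar+rest";
	let word = take_while_prefix(&mut tail, |c| c.is_alphanumeric() || c == '_');
	assert_eq!(word, "foo_bar");
	assert_eq!(tail, "+rest");
}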


@@ -4,8 +4,8 @@ pub mod child;
pub mod expr;
pub mod extension;
pub mod lex;
pub mod parse;
pub mod subprocess;
pub mod tree;
pub mod macros;
pub mod rule;


@@ -17,158 +17,158 @@ pub type MacTok = MTok<'static, AtomHand>;
pub type MacTree = MTree<'static, AtomHand>;
trait_set! {
trait MacroCB = Fn(Vec<MacTree>) -> Option<Vec<MacTree>> + Send + Sync;
}
lazy_static! {
static ref RECURSION: RwLock<HashMap<api::ParsId, Box<dyn MacroCB>>> = RwLock::default();
static ref MACRO_SLOTS: RwLock<HashMap<api::ParsId, HashMap<api::MacroTreeId, Arc<MacTok>>>> =
RwLock::default();
}
pub fn macro_recur(run_id: api::ParsId, input: Vec<MacTree>) -> Option<Vec<MacTree>> {
(RECURSION.read().unwrap()[&run_id])(input)
}
pub fn macro_treev_to_api(run_id: api::ParsId, mtree: Vec<MacTree>) -> Vec<api::MacroTree> {
let mut g = MACRO_SLOTS.write().unwrap();
let run_cache = g.get_mut(&run_id).expect("Parser run not found");
mtreev_to_api(&mtree, &mut |a: &AtomHand| {
let id = api::MacroTreeId((run_cache.len() as u64 + 1).try_into().unwrap());
run_cache.insert(id, Arc::new(MacTok::Atom(a.clone())));
api::MacroToken::Slot(id)
})
}
pub fn macro_treev_from_api(api: Vec<api::MacroTree>) -> Vec<MacTree> {
mtreev_from_api(&api, &mut |atom| MacTok::Atom(AtomHand::from_api(atom.clone())))
}
pub fn deslot_macro(run_id: api::ParsId, tree: &[MacTree]) -> Option<Vec<MacTree>> {
let mut slots = (MACRO_SLOTS.write().unwrap()).remove(&run_id).expect("Run not found");
return work(&mut slots, tree);
fn work(
slots: &mut HashMap<api::MacroTreeId, Arc<MacTok>>,
tree: &[MacTree],
) -> Option<Vec<MacTree>> {
let items = (tree.iter())
.map(|t| {
Some(MacTree {
tok: match &*t.tok {
MacTok::Atom(_) | MacTok::Name(_) | MacTok::Ph(_) => return None,
MacTok::Ref(_) => panic!("Ref is an extension-local optimization"),
MacTok::Done(_) => panic!("Created and removed by matcher"),
MacTok::Slot(slot) => slots.get(&slot.id()).expect("Slot not found").clone(),
MacTok::S(paren, b) => Arc::new(MacTok::S(*paren, work(slots, b)?)),
MacTok::Lambda(a, b) => Arc::new(match (work(slots, a), work(slots, b)) {
(None, None) => return None,
(Some(a), None) => MacTok::Lambda(a, b.clone()),
(None, Some(b)) => MacTok::Lambda(a.clone(), b),
(Some(a), Some(b)) => MacTok::Lambda(a, b),
}),
},
pos: t.pos.clone(),
})
})
.collect_vec();
let any_changed = items.iter().any(Option::is_some);
any_changed.then(|| {
(items.into_iter().enumerate())
.map(|(i, opt)| opt.unwrap_or_else(|| tree[i].clone()))
.collect_vec()
})
}
}
pub struct Macro<Matcher> {
deps: HashSet<Sym>,
cases: Vec<(Matcher, Code)>,
}
pub struct MacroRepo {
named: HashMap<Sym, Vec<Macro<NamedMatcher>>>,
prio: Vec<Macro<PriodMatcher>>,
}
impl MacroRepo {
/// TODO: the recursion inside this function needs to be moved into Orchid.
/// See the markdown note
pub fn process_exprv(&self, target: &[MacTree]) -> Option<Vec<MacTree>> {
let mut workcp = target.to_vec();
let mut lexicon;
'try_named: loop {
lexicon = HashSet::new();
target.iter().for_each(|tgt| fill_lexicon(tgt, &mut lexicon));
for (i, tree) in workcp.iter().enumerate() {
let MacTok::Name(name) = &*tree.tok else { continue };
let matches = (self.named.get(name).into_iter().flatten())
.filter(|m| m.deps.is_subset(&lexicon))
.filter_map(|mac| {
mac.cases.iter().find_map(|cas| cas.0.apply(&workcp[i..], |_| false).map(|s| (cas, s)))
})
.collect_vec();
assert!(
matches.len() < 2,
"Multiple conflicting matches on {:?}: {:?}",
&workcp[i..],
matches
);
let Some((case, (state, tail))) = matches.into_iter().next() else { continue };
let inj = (run_body(&case.1, state).into_iter())
.map(|MacTree { pos, tok }| MacTree { pos, tok: Arc::new(MacTok::Done(tok)) });
workcp.splice(i..(workcp.len() - tail.len()), inj);
continue 'try_named;
}
break;
}
if let Some(((_, body), state)) = (self.prio.iter())
.filter(|mac| mac.deps.is_subset(&lexicon))
.flat_map(|mac| &mac.cases)
.find_map(|case| case.0.apply(&workcp, |_| false).map(|state| (case, state)))
{
return Some(run_body(body, state));
}
let results = (workcp.into_iter())
.map(|mt| match &*mt.tok {
MTok::S(p, body) => self.process_exprv(body).map(|body| MTok::S(*p, body).at(mt.pos)),
MTok::Lambda(arg, body) => match (self.process_exprv(arg), self.process_exprv(body)) {
(Some(arg), Some(body)) => Some(MTok::Lambda(arg, body).at(mt.pos)),
(Some(arg), None) => Some(MTok::Lambda(arg, body.to_vec()).at(mt.pos)),
(None, Some(body)) => Some(MTok::Lambda(arg.to_vec(), body).at(mt.pos)),
(None, None) => None,
},
_ => None,
})
.collect_vec();
results.iter().any(Option::is_some).then(|| {
(results.into_iter().zip(target))
.map(|(opt, fb)| opt.unwrap_or_else(|| fb.clone()))
.collect_vec()
})
}
}
fn fill_lexicon(tgt: &MacTree, lexicon: &mut HashSet<Sym>) {
match &*tgt.tok {
MTok::Name(n) => {
lexicon.insert(n.clone());
},
MTok::Lambda(arg, body) => {
arg.iter().for_each(|t| fill_lexicon(t, lexicon));
body.iter().for_each(|t| fill_lexicon(t, lexicon))
},
MTok::S(_, body) => body.iter().for_each(|t| fill_lexicon(t, lexicon)),
_ => (),
}
}
fn run_body(body: &Code, mut state: MatchState<'_>) -> Vec<MacTree> {
let inject: Vec<MacTree> = todo!("Call the interpreter with bindings");
inject
.into_iter()
.map(|MTree { pos, tok }| MTree { pos, tok: Arc::new(MTok::Done(tok)) })
.collect_vec()
}
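process_exprv above only attempts a macro whose dependency set is contained in the lexicon of names collected from the expression (m.deps.is_subset(&lexicon)). A self-contained sketch of that gating step with simplified stand-in types, std only and not the crate's real API:

use std::collections::HashSet;

struct RuleStub { deps: HashSet<String> }

// Keep only the rules whose every dependency occurs in the expression's lexicon.
fn applicable<'a>(rules: &'a [RuleStub], lexicon: &HashSet<String>) -> Vec<&'a RuleStub> {
	rules.iter().filter(|r| r.deps.is_subset(lexicon)).collect()
}

fn main() {
	let lexicon: HashSet<String> = ["do", ";"].iter().map(|s| s.to_string()).collect();
	let rules = vec![
		RuleStub { deps: ["do"].iter().map(|s| s.to_string()).collect() },
		RuleStub { deps: ["loop"].iter().map(|s| s.to_string()).collect() },
	];
	// Only the first rule has all of its dependencies in scope.
	assert_eq!(applicable(&rules, &lexicon).len(), 1);
}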


@@ -3,279 +3,295 @@ use std::{iter, thread};
use itertools::Itertools;
use never::Never;
use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_err, mk_errv};
use orchid_base::intern;
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::macros::{MTok, MTree};
use orchid_base::name::Sym;
use orchid_base::parse::{
Comment, Import, Parsed, Snippet, expect_end, line_items, parse_multiname, strip_fluff,
try_pop_no_fluff,
};
use orchid_base::tree::{Paren, TokTree, Token};
use substack::Substack;
use crate::extension::{AtomHand, System};
use crate::macros::MacTree;
use crate::tree::{
Code, CodeLocator, Item, ItemKind, Member, MemberKind, Module, ParsTokTree, Rule, RuleKind,
};
type ParsSnippet<'a> = Snippet<'a, 'static, AtomHand, Never>;
pub trait ParseCtx: Send + Sync {
fn systems(&self) -> impl Iterator<Item = &System>;
fn reporter(&self) -> &impl Reporter;
}
pub fn parse_items(
ctx: &impl ParseCtx,
path: Substack<Tok<String>>,
items: ParsSnippet,
) -> OrcRes<Vec<Item>> {
let lines = line_items(items);
let mut ok = iter::from_fn(|| None).take(lines.len()).collect_vec();
thread::scope(|s| {
let mut threads = Vec::new();
for (slot, Parsed { output: cmts, tail }) in ok.iter_mut().zip(lines.into_iter()) {
let path = &path;
threads.push(s.spawn(move || {
*slot = Some(parse_item(ctx, path.clone(), cmts, tail)?);
Ok::<(), OrcErrv>(())
}))
}
for t in threads {
t.join().unwrap().err().into_iter().flatten().for_each(|e| ctx.reporter().report(e))
}
});
Ok(ok.into_iter().flatten().flatten().collect_vec())
}
pub fn parse_item(
ctx: &impl ParseCtx,
path: Substack<Tok<String>>,
comments: Vec<Comment>,
item: ParsSnippet,
) -> OrcRes<Vec<Item>> {
match item.pop_front() {
Some((TokTree { tok: Token::Name(n), .. }, postdisc)) => match n {
n if *n == intern!(str: "export") => match try_pop_no_fluff(postdisc)? {
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
parse_exportable_item(ctx, path, comments, true, n.clone(), tail),
Parsed { output: TokTree { tok: Token::NS, .. }, tail } => {
let Parsed { output: exports, tail } = parse_multiname(ctx.reporter(), tail)?;
let mut ok = Vec::new();
exports.into_iter().for_each(|(e, pos)| match (&e.path.as_slice(), e.name) {
([], Some(n)) =>
ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n) }),
(_, Some(_)) => ctx.reporter().report(mk_err(
intern!(str: "Compound export"),
"Cannot export compound names (names containing the :: separator)",
[pos.into()],
)),
(_, None) => ctx.reporter().report(mk_err(
intern!(str: "Wildcard export"),
"Exports cannot contain the globstar *",
[pos.into()],
)),
});
expect_end(tail)?;
Ok(ok)
},
Parsed { output, .. } => Err(mk_errv(
intern!(str: "Malformed export"),
"`export` can either prefix other lines or list names inside ::( ) or ::[ ]",
[Pos::Range(output.range.clone()).into()],
)),
},
n if *n == intern!(str: "import") => parse_import(ctx, postdisc).map(|v| {
Vec::from_iter(v.into_iter().map(|(t, pos)| Item {
comments: comments.clone(),
pos,
kind: ItemKind::Import(t),
}))
}),
n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc),
},
Some(_) =>
Err(mk_errv(intern!(str: "Expected a line type"), "All lines must begin with a keyword", [
Pos::Range(item.pos()).into(),
])),
None => unreachable!("These lines are filtered and aggregated in earlier stages"),
}
}
pub fn parse_import(ctx: &impl ParseCtx, tail: ParsSnippet) -> OrcRes<Vec<(Import, Pos)>> {
let Parsed { output: imports, tail } = parse_multiname(ctx.reporter(), tail)?;
expect_end(tail)?;
Ok(imports)
}
pub fn parse_exportable_item(
ctx: &impl ParseCtx,
path: Substack<Tok<String>>,
comments: Vec<Comment>,
exported: bool,
discr: Tok<String>,
tail: ParsSnippet,
) -> OrcRes<Vec<Item>> {
let kind = if discr == intern!(str: "mod") {
let (name, body) = parse_module(ctx, path, tail)?;
ItemKind::Member(Member::new(name, MemberKind::Mod(body)))
} else if discr == intern!(str: "const") {
let (name, val) = parse_const(tail)?;
let locator = CodeLocator::to_const(path.push(name.clone()).unreverse());
ItemKind::Member(Member::new(name, MemberKind::Const(Code::from_code(locator, val))))
} else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
let line = sys.parse(tail.to_vec(), exported, comments)?;
return parse_items(ctx, path, Snippet::new(tail.prev(), &line));
} else {
let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
return Err(mk_errv(
intern!(str: "Unrecognized line type"),
format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
[Pos::Range(tail.prev().range.clone()).into()],
));
};
Ok(vec![Item { comments, pos: Pos::Range(tail.pos()), kind }])
}
pub fn parse_module(
ctx: &impl ParseCtx,
path: Substack<Tok<String>>,
tail: ParsSnippet,
) -> OrcRes<(Tok<String>, Module)> {
let (name, tail) = match try_pop_no_fluff(tail)? {
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail),
Parsed { output, .. } => {
return Err(mk_errv(
intern!(str: "Missing module name"),
format!("A name was expected, {output} was found"),
[Pos::Range(output.range.clone()).into()],
));
},
};
let Parsed { output, tail: surplus } = try_pop_no_fluff(tail)?;
expect_end(surplus)?;
let body = output.as_s(Paren::Round).ok_or_else(|| {
mk_errv(
intern!(str: "Expected module body"),
format!("A ( block ) was expected, {output} was found"),
[Pos::Range(output.range.clone()).into()],
)
})?;
let path = path.push(name.clone());
Ok((name, Module::new(parse_items(ctx, path, body)?)))
}
pub fn parse_const(tail: ParsSnippet) -> OrcRes<(Tok<String>, Vec<ParsTokTree>)> {
let Parsed { output, tail } = try_pop_no_fluff(tail)?;
let name = output.as_name().ok_or_else(|| {
mk_errv(
intern!(str: "Missing module name"),
format!("A name was expected, {output} was found"),
[Pos::Range(output.range.clone()).into()],
)
})?;
let Parsed { output, tail } = try_pop_no_fluff(tail)?;
if !output.is_kw(intern!(str: "=")) {
return Err(mk_errv(
intern!(str: "Missing walrus := separator"),
format!("Expected operator := , found {output}"),
[Pos::Range(output.range.clone()).into()],
));
}
try_pop_no_fluff(tail)?;
Ok((name, tail.iter().flat_map(strip_fluff).collect_vec()))
}
pub fn parse_mtree(mut snip: ParsSnippet<'_>) -> OrcRes<Vec<MacTree>> {
let mut mtreev = Vec::new();
while let Some((ttree, tail)) = snip.pop_front() {
let (range, tok, tail) = match &ttree.tok {
Token::S(p, b) =>
(ttree.range.clone(), MTok::S(*p, parse_mtree(Snippet::new(ttree, b))?), tail),
Token::Name(tok) => {
let mut segments = vec![tok.clone()];
let mut end = ttree.range.end;
while let Some((TokTree { tok: Token::NS, .. }, tail)) = snip.pop_front() {
let Parsed { output, tail } = try_pop_no_fluff(tail)?;
segments.push(output.as_name().ok_or_else(|| {
mk_errv(
intern!(str: "Namespaced name interrupted"),
"In expression context, :: must always be followed by a name.\n\
::() is permitted only in import and export items",
[Pos::Range(output.range.clone()).into()],
)
})?);
snip = tail;
end = output.range.end;
}
(ttree.range.start..end, MTok::Name(Sym::new(segments).unwrap()), snip)
},
Token::NS => {
return Err(mk_errv(
intern!(str: "Unexpected :: in macro pattern"),
":: can only follow a name outside export statements",
[Pos::Range(ttree.range.clone()).into()],
));
},
Token::Ph(ph) => (ttree.range.clone(), MTok::Ph(ph.clone()), tail),
Token::Atom(_) | Token::Macro(_) => {
return Err(mk_errv(
intern!(str: "Unsupported token in macro patterns"),
format!("Macro patterns can only contain names, braces, and lambda, not {ttree}."),
[Pos::Range(ttree.range.clone()).into()],
));
},
Token::BR | Token::Comment(_) => continue,
Token::Bottom(e) => return Err(e.clone()),
Token::LambdaHead(arg) => (
ttree.range.start..snip.pos().end,
MTok::Lambda(parse_mtree(Snippet::new(ttree, arg))?, parse_mtree(tail)?),
Snippet::new(ttree, &[]),
),
Token::Slot(_) | Token::X(_) => panic!("Did not expect {} in parsed token tree", &ttree.tok),
};
mtreev.push(MTree { pos: Pos::Range(range.clone()), tok: Arc::new(tok) });
snip = tail;
}
Ok(mtreev)
}
pub fn parse_macro(
tail: ParsSnippet,
macro_i: u16,
path: Substack<Tok<String>>,
) -> OrcRes<Vec<Rule>> {
let (surplus, prev, block) = match try_pop_no_fluff(tail)? {
Parsed { tail, output: o @ TokTree { tok: Token::S(Paren::Round, b), .. } } => (tail, o, b),
Parsed { output, .. } => {
return Err(mk_errv(
intern!(str: "m"),
"Macro blocks must either start with a block or a ..$:number",
[Pos::Range(output.range.clone()).into()],
));
},
};
expect_end(surplus)?;
let mut errors = Vec::new();
let mut rules = Vec::new();
for (i, item) in line_items(Snippet::new(prev, block)).into_iter().enumerate() {
let Parsed { tail, output } = try_pop_no_fluff(item.tail)?;
if !output.is_kw(intern!(str: "rule")) {
errors.extend(mk_errv(
intern!(str: "non-rule in macro"),
format!("Expected `rule`, got {output}"),
[Pos::Range(output.range.clone()).into()],
));
continue;
};
let (pat, body) = match tail.split_once(|t| t.is_kw(intern!(str: "=>"))) {
Some((a, b)) => (a, b),
None => {
errors.extend(mk_errv(
intern!(str: "no => in macro rule"),
"The pattern and body of a rule must be separated by a =>",
[Pos::Range(tail.pos()).into()],
));
continue;
},
};
rules.push(Rule {
comments: item.output,
pos: Pos::Range(tail.pos()),
pattern: parse_mtree(pat)?,
kind: RuleKind::Native(Code::from_code(
CodeLocator::to_rule(path.unreverse(), macro_i, i as u16),
body.to_vec(),
)),
})
}
if let Ok(e) = OrcErrv::new(errors) { Err(e) } else { Ok(rules) }
}
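parse_macro above splits each rule at the => keyword via tail.split_once(|t| t.is_kw(...)) on the crate's Snippet type. The same operation on a plain slice, as an illustrative std-only helper in which the token type is a stand-in:

fn split_at_sep<'a, T>(toks: &'a [T], is_sep: impl Fn(&T) -> bool) -> Option<(&'a [T], &'a [T])> {
	// Slices before and after the first separator token; None if it is missing.
	let i = toks.iter().position(is_sep)?;
	Some((&toks[..i], &toks[i + 1..]))
}

fn main() {
	let toks = ["$pat", "=>", "body", "tokens"];
	let (pat, body) = split_at_sep(&toks, |t| *t == "=>").expect("missing =>");
	assert_eq!(pat, &["$pat"][..]);
	assert_eq!(body, &["body", "tokens"][..]);
}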


@@ -1,29 +1,30 @@
use super::scal_match::scalv_match;
use super::shared::AnyMatcher;
use super::vec_match::vec_match;
use orchid_base::name::Sym;
use crate::macros::MacTree;
use crate::rule::state::MatchState;
#[must_use]
pub fn any_match<'a>(
matcher: &AnyMatcher,
seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
match matcher {
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq, save_loc),
AnyMatcher::Vec { left, mid, right } => {
if seq.len() < left.len() + right.len() {
return None;
};
let left_split = left.len();
let right_split = seq.len() - right.len();
Some(
scalv_match(left, &seq[..left_split], save_loc)?
.combine(scalv_match(right, &seq[right_split..], save_loc)?)
.combine(vec_match(mid, &seq[left_split..right_split], save_loc)?),
)
},
}
}


@@ -1,6 +1,6 @@
use orchid_api::PhKind;
use orchid_base::interner::Tok;
use itertools::Itertools;
use orchid_base::side::Side;
use orchid_base::tree::Ph;
@@ -14,30 +14,30 @@ pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree
/// slice of Expr's
#[must_use]
fn split_at_max_vec(pattern: &[MacTree]) -> Option<MaxVecSplit> {
let rngidx = pattern
.iter()
.position_max_by_key(|expr| vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1))?;
let (left, not_left) = pattern.split_at(rngidx);
let (placeh, right) =
not_left.split_first().expect("The index of the greatest element must be less than the length");
vec_attrs(placeh).map(|attrs| (left, attrs, right))
}
#[must_use]
fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
iter.take_while(|expr| vec_attrs(expr).is_none()).count()
}
#[must_use]
pub fn mk_any(pattern: &[MacTree]) -> AnyMatcher {
let left_split = scal_cnt(pattern.iter());
if pattern.len() <= left_split {
return AnyMatcher::Scalar(mk_scalv(pattern));
}
let (left, not_left) = pattern.split_at(left_split);
let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
let (mid, right) = not_left.split_at(right_split);
AnyMatcher::Vec { left: mk_scalv(left), mid: mk_vec(mid), right: mk_scalv(right) }
}
/// Pattern MUST NOT contain vectorial placeholders
@@ -47,105 +47,103 @@ fn mk_scalv(pattern: &[MacTree]) -> Vec<ScalMatcher> { pattern.iter().map(mk_sca
/// Pattern MUST start and end with a vectorial placeholder
#[must_use]
pub fn mk_vec(pattern: &[MacTree]) -> VecMatcher {
debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
let (left, (key, _, nonzero), right) = split_at_max_vec(pattern)
.expect("pattern must have vectorial placeholders at least at either end");
let r_sep_size = scal_cnt(right.iter());
let (r_sep, r_side) = right.split_at(r_sep_size);
let l_sep_size = scal_cnt(left.iter().rev());
let (l_side, l_sep) = left.split_at(left.len() - l_sep_size);
let main = VecMatcher::Placeh { key: key.clone(), nonzero };
match (left, right) {
(&[], &[]) => VecMatcher::Placeh { key, nonzero },
(&[], _) => VecMatcher::Scan {
direction: Side::Left,
left: Box::new(main),
sep: mk_scalv(r_sep),
right: Box::new(mk_vec(r_side)),
},
(_, &[]) => VecMatcher::Scan {
direction: Side::Right,
left: Box::new(mk_vec(l_side)),
sep: mk_scalv(l_sep),
right: Box::new(main),
},
(..) => {
let mut key_order =
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
VecMatcher::Middle {
left: Box::new(mk_vec(l_side)),
left_sep: mk_scalv(l_sep),
mid: Box::new(main),
right_sep: mk_scalv(r_sep),
right: Box::new(mk_vec(r_side)),
key_order: key_order.into_iter().map(|(n, ..)| n).collect(),
}
},
}
}
/// Pattern MUST NOT be a vectorial placeholder
#[must_use]
fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
match &*pattern.tok {
MacTok::Atom(_) | MacTok::Done(_) => panic!("Atoms and Done aren't supported in matchers"),
MacTok::Name(n) => ScalMatcher::Name(n.clone()),
MacTok::Ph(Ph { name, kind }) => match kind {
PhKind::Vector { .. } => {
panic!("Scalar matcher cannot be built from vector pattern")
},
PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
},
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))),
MacTok::Lambda(arg, body) => ScalMatcher::Lambda(Box::new(mk_any(arg)), Box::new(mk_any(body))),
MacTok::Ref(_) | MacTok::Slot(_) => panic!("Extension-only variants"),
}
}
#[cfg(test)]
mod test {
use std::sync::Arc;
use orchid_api::PhKind;
use orchid_base::location::SourceRange;
use orchid_base::tokens::Paren;
use orchid_base::tree::Ph;
use orchid_base::{intern, sym};
use crate::macros::{MacTok, MacTree};
use super::mk_any;
#[test]
fn test_scan() {
let ex = |tok: MacTok| MacTree { tok: Arc::new(tok), pos: SourceRange::mock().pos() };
let pattern = vec![
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false },
name: intern!(str: "::prefix"),
})),
ex(MacTok::Name(sym!(prelude::do))),
ex(MacTok::S(Paren::Round, vec![
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false },
name: intern!(str: "expr"),
})),
ex(MacTok::Name(sym!(prelude::;))),
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 1, at_least_one: false },
name: intern!(str: "rest"),
})),
])),
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false },
name: intern!(str: "::suffix"),
})),
];
let matcher = mk_any(&pattern);
println!("{matcher}");
}
}


@@ -21,65 +21,66 @@ pub fn last_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.last().unwra
pub struct NamedMatcher(AnyMatcher);
impl NamedMatcher {
pub fn new(pattern: &[MacTree]) -> Self {
assert!(
matches!(pattern.first().map(|tree| &*tree.tok), Some(MacTok::Name(_))),
"Named matchers must begin with a name"
);
match last_is_vec(pattern) {
true => Self(mk_any(pattern)),
false => {
let kind: PhKind = PhKind::Vector { priority: 0, at_least_one: false };
let suffix = [MacTok::Ph(Ph { name: intern!(str: "::after"), kind }).at(Pos::None)];
Self(mk_any(&pattern.iter().chain(&suffix).cloned().collect_vec()))
},
}
}
/// Also returns the tail, if any, which should be matched further
/// Note that due to how prioritized matching works below, the main usable information from
/// the tail is its length
pub fn apply<'a>(
&self,
seq: &'a [MacTree],
save_loc: impl Fn(Sym) -> bool,
) -> Option<(MatchState<'a>, &'a [MacTree])> {
any_match(&self.0, seq, &save_loc).map(|mut state| {
match state.remove(intern!(str: "::after")) {
Some(StateEntry::Scalar(_)) => panic!("::after can never be a scalar entry!"),
Some(StateEntry::Vec(v)) => (state, v),
None => (state, &[][..]),
}
})
}
}
impl fmt::Display for NamedMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
}
impl fmt::Debug for NamedMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
}
pub struct PriodMatcher(VecMatcher);
impl PriodMatcher {
pub fn new(pattern: &[MacTree]) -> Self {
assert!(
pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(),
"Prioritized matchers must start and end with a vectorial",
);
Self(mk_vec(pattern))
}
/// tokens before the offset always match the prefix
pub fn apply<'a>(
&self,
seq: &'a [MacTree],
save_loc: impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
vec_match(&self.0, seq, &save_loc)
}
}
impl fmt::Display for PriodMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
}
impl fmt::Debug for PriodMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PriodMatcher({self})") }
}


@@ -4,10 +4,9 @@
//!
//! convert pattern into hierarchy of plain, scan, middle
//! - plain: accept any sequence or any non-empty sequence
//! - scan: a single scalar pattern moves LTR or RTL, submatchers on either side
//! - middle: two scalar patterns walk over all permutations of matches while
//! getting progressively closer to each other
//!
//! # Application
//!
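A stripped-down illustration of the "scan" step described above, using plain slices; std only, with illustrative names — the real matcher works over MacTree tokens and can also scan right to left:

fn scan_split<'a, T: PartialEq>(seq: &'a [T], sep: &[T]) -> Option<(&'a [T], &'a [T])> {
	if seq.len() < sep.len() {
		return None;
	}
	// Slide the separator pattern left to right; the first hit splits the
	// sequence into the regions handed to the left and right submatchers.
	for lpos in 0..=seq.len() - sep.len() {
		if &seq[lpos..lpos + sep.len()] == sep {
			return Some((&seq[..lpos], &seq[lpos + sep.len()..]));
		}
	}
	None
}

fn main() {
	let seq = ["a", "b", ";", "c"];
	let (left, right) = scan_split(&seq, &[";"]).unwrap();
	assert_eq!(left, &["a", "b"][..]);
	assert_eq!(right, &["c"][..]);
}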
@@ -16,10 +15,10 @@
mod any_match;
mod build;
mod scal_match;
pub mod shared;
pub mod state;
mod vec_attrs;
pub mod matcher;
mod vec_match;
// pub mod matcher;


@@ -7,38 +7,38 @@ use crate::rule::state::{MatchState, StateEntry};
#[must_use]
pub fn scal_match<'a>(
matcher: &ScalMatcher,
expr: &'a MacTree,
save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
match (matcher, &*expr.tok) {
(ScalMatcher::Name(n1), MacTok::Name(n2)) if n1 == n2 => Some(match save_loc(n1.clone()) {
true => MatchState::from_name(n1.clone(), expr.pos.clone()),
false => MatchState::default(),
}),
(ScalMatcher::Placeh { .. }, MacTok::Done(_)) => None,
(ScalMatcher::Placeh { key }, _) =>
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
any_match(b_mat, &body[..], save_loc),
(ScalMatcher::Lambda(arg_mat, b_mat), MacTok::Lambda(arg, body)) =>
Some(any_match(arg_mat, arg, save_loc)?.combine(any_match(b_mat, body, save_loc)?)),
_ => None,
}
}
#[must_use]
pub fn scalv_match<'a>(
matchers: &[ScalMatcher],
seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
if seq.len() != matchers.len() {
return None;
}
let mut state = MatchState::default();
for (matcher, expr) in matchers.iter().zip(seq.iter()) {
state = state.combine(scal_match(matcher, expr, save_loc)?);
}
Some(state)
}


@@ -9,93 +9,93 @@ use orchid_base::side::Side;
use orchid_base::tokens::{PARENS, Paren};
pub enum ScalMatcher {
Name(Sym),
S(Paren, Box<AnyMatcher>),
Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
Placeh { key: Tok<String> },
}
pub enum VecMatcher {
Placeh {
key: Tok<String>,
nonzero: bool,
},
Scan {
left: Box<VecMatcher>,
sep: Vec<ScalMatcher>,
right: Box<VecMatcher>,
/// The separator traverses the sequence towards this side
direction: Side,
},
Middle {
/// Matches the left outer region
left: Box<VecMatcher>,
/// Matches the left separator
left_sep: Vec<ScalMatcher>,
/// Matches the middle - can only ever be a plain placeholder
mid: Box<VecMatcher>,
/// Matches the right separator
right_sep: Vec<ScalMatcher>,
/// Matches the right outer region
right: Box<VecMatcher>,
/// Order of significance for sorting equally good projects based on
/// the length of matches on either side.
///
/// Vectorial keys that appear on either side, in priority order
key_order: Vec<Tok<String>>,
},
}
pub enum AnyMatcher {
Scalar(Vec<ScalMatcher>),
Vec { left: Vec<ScalMatcher>, mid: VecMatcher, right: Vec<ScalMatcher> },
}
// ################ Display ################
impl fmt::Display for ScalMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Placeh { key } => write!(f, "${key}"),
Self::Name(n) => write!(f, "{n}"),
Self::S(t, body) => {
let (l, r, _) = PARENS.iter().find(|r| r.2 == *t).unwrap();
write!(f, "{l}{body}{r}")
},
Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"),
}
}
}
impl fmt::Display for VecMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Placeh { key, nonzero: true } => write!(f, "...${key}"),
Self::Placeh { key, nonzero: false } => write!(f, "..${key}"),
Self::Scan { left, sep, right, direction } => {
let arrow = if direction == &Side::Left { "<==" } else { "==>" };
write!(f, "Scan{{{left} {arrow} {} {arrow} {right}}}", sep.iter().join(" "))
},
Self::Middle { left, left_sep, mid, right_sep, right, .. } => {
let left_sep_s = left_sep.iter().join(" ");
let right_sep_s = right_sep.iter().join(" ");
write!(f, "Middle{{{left}|{left_sep_s}|{mid}|{right_sep_s}|{right}}}")
},
}
}
}
impl fmt::Display for AnyMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Scalar(s) => {
write!(f, "({})", s.iter().join(" "))
},
Self::Vec { left, mid, right } => {
let lefts = left.iter().join(" ");
let rights = right.iter().join(" ");
write!(f, "[{lefts}|{mid}|{rights}]")
},
}
}
}


@@ -11,83 +11,81 @@ use orchid_base::name::Sym;
use crate::macros::MacTree;
enum StackAction {
Return(Box<dyn Any>),
Call {
target: Box<dyn FnOnce(Box<dyn Any>) -> StackAction>,
param: Box<dyn Any>,
tail: Box<dyn FnOnce(Box<dyn Any>) -> StackAction>,
},
}
struct Trampoline {
stack: Vec<Box<dyn FnOnce(Box<dyn Any>) -> StackAction>>,
}
#[derive(Clone, Copy, Debug)]
pub enum StateEntry<'a> {
Vec(&'a [MacTree]),
Scalar(&'a MacTree),
}
#[derive(Clone, Debug)]
pub struct MatchState<'a> {
placeholders: HashMap<Tok<String>, StateEntry<'a>>,
name_posv: HashMap<Sym, Vec<Pos>>,
}
impl<'a> MatchState<'a> {
pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self {
Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() }
}
pub fn combine(self, s: Self) -> Self {
Self {
placeholders: self.placeholders.into_iter().chain(s.placeholders).collect(),
name_posv: join_maps(self.name_posv, s.name_posv, |_, l, r| l.into_iter().chain(r).collect()),
}
}
pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> {
match self.placeholders.get(key)? {
StateEntry::Vec(slc) => Some(slc.len()),
_ => None,
}
}
pub fn from_name(name: Sym, location: Pos) -> Self {
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
}
pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> {
self.placeholders.remove(&name)
}
pub fn mk_owned(self) -> OwnedState {
OwnedState {
placeholders: (self.placeholders.into_iter())
.map(|(k, v)| {
(
k.clone(),
match_mapping!(v, StateEntry => OwnedEntry {
Scalar(tree.clone()),
Vec(v.to_vec()),
}),
)
})
.collect(),
name_posv: self.name_posv,
}
}
}
impl Default for MatchState<'static> {
fn default() -> Self { Self { name_posv: HashMap::new(), placeholders: HashMap::new() } }
}
#[derive(Clone, Debug)]
pub enum OwnedEntry {
Vec(Vec<MacTree>),
Scalar(MacTree),
Vec(Vec<MacTree>),
Scalar(MacTree),
}
pub struct OwnedState {
placeholders: HashMap<Tok<String>, OwnedEntry>,
name_posv: HashMap<Sym, Vec<Pos>>,
placeholders: HashMap<Tok<String>, OwnedEntry>,
name_posv: HashMap<Sym, Vec<Pos>>,
}
impl OwnedState {
pub fn get(&self, key: &Tok<String>) -> Option<&OwnedEntry> { self.placeholders.get(key) }
pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) }
pub fn get(&self, key: &Tok<String>) -> Option<&OwnedEntry> { self.placeholders.get(key) }
pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) }
}
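
To keep the roles of these constructors straight, here is a small illustrative sketch that is not part of the commit. It combines a vectorial placeholder binding with one recorded name occurrence using only the functions defined above; the helper name and the assumption that the caller already holds a key, a name, a position and a borrowed slice of trees are hypothetical.

fn bind_and_note<'a>(key: Tok<String>, rest: &'a [MacTree], name: Sym, pos: Pos) -> MatchState<'a> {
	// One placeholder entry covering the whole slice, merged with one position for `name`.
	MatchState::from_ph(key, StateEntry::Vec(rest)).combine(MatchState::from_name(name, pos))
}

Because the entry is vectorial, `ph_len` on the returned state reports `Some(rest.len())` for that key.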

View File

@@ -8,9 +8,9 @@ use crate::macros::{MacTok, MacTree};
/// a vectorial placeholder
#[must_use]
pub fn vec_attrs(expr: &MacTree) -> Option<(Tok<String>, u8, bool)> {
match (*expr.tok).clone() {
MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) =>
Some((name, priority, at_least_one)),
_ => None,
}
match (*expr.tok).clone() {
MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) =>
Some((name, priority, at_least_one)),
_ => None,
}
}

View File

@@ -1,94 +1,95 @@
use std::cmp::Ordering;
use itertools::Itertools;
use orchid_base::name::Sym;
use super::scal_match::scalv_match;
use super::shared::VecMatcher;
use orchid_base::name::Sym;
use crate::{macros::MacTree, rule::state::{MatchState, StateEntry}};
use crate::macros::MacTree;
use crate::rule::state::{MatchState, StateEntry};
#[must_use]
pub fn vec_match<'a>(
matcher: &VecMatcher,
seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool,
matcher: &VecMatcher,
seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
match matcher {
VecMatcher::Placeh { key, nonzero } => {
if *nonzero && seq.is_empty() {
return None;
}
Some(MatchState::from_ph(key.clone(), StateEntry::Vec(seq)))
},
VecMatcher::Scan { left, sep, right, direction } => {
if seq.len() < sep.len() {
return None;
}
for lpos in direction.walk(0..=seq.len() - sep.len()) {
let rpos = lpos + sep.len();
let state = vec_match(left, &seq[..lpos], save_loc)
.and_then(|s| Some(s.combine(scalv_match(sep, &seq[lpos..rpos], save_loc)?)))
.and_then(|s| Some(s.combine(vec_match(right, &seq[rpos..], save_loc)?)));
if let Some(s) = state {
return Some(s);
}
}
None
},
// XXX predict heap space usage and allocation count
VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => {
if seq.len() < left_sep.len() + right_sep.len() {
return None;
}
// Valid locations for the left separator
let lposv = seq[..seq.len() - right_sep.len()]
.windows(left_sep.len())
.enumerate()
.filter_map(|(i, window)| scalv_match(left_sep, window, save_loc).map(|s| (i, s)))
.collect::<Vec<_>>();
// Valid locations for the right separator
let rposv = seq[left_sep.len()..]
.windows(right_sep.len())
.enumerate()
.filter_map(|(i, window)| scalv_match(right_sep, window, save_loc).map(|s| (i, s)))
.collect::<Vec<_>>();
// Valid combinations of locations for the separators
let mut pos_pairs = lposv
.into_iter()
.cartesian_product(rposv)
.filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
.map(|((lpos, lstate), (rpos, rstate))| (lpos, rpos, lstate.combine(rstate)))
.collect::<Vec<_>>();
// In descending order of size
pos_pairs.sort_by_key(|(l, r, _)| -((r - l) as i64));
let eql_clusters = pos_pairs.into_iter().chunk_by(|(al, ar, _)| ar - al);
for (_gap_size, cluster) in eql_clusters.into_iter() {
let best_candidate = cluster
.into_iter()
.filter_map(|(lpos, rpos, state)| {
Some(
state
.combine(vec_match(left, &seq[..lpos], save_loc)?)
.combine(vec_match(mid, &seq[lpos + left_sep.len()..rpos], save_loc)?)
.combine(vec_match(right, &seq[rpos + right_sep.len()..], save_loc)?),
)
})
.max_by(|a, b| {
for key in key_order {
let alen = a.ph_len(key).expect("key_order references scalar or missing");
let blen = b.ph_len(key).expect("key_order references scalar or missing");
match alen.cmp(&blen) {
Ordering::Equal => (),
any => return any,
}
}
Ordering::Equal
});
if let Some(state) = best_candidate {
return Some(state);
}
}
None
},
}
match matcher {
VecMatcher::Placeh { key, nonzero } => {
if *nonzero && seq.is_empty() {
return None;
}
Some(MatchState::from_ph(key.clone(), StateEntry::Vec(seq)))
},
VecMatcher::Scan { left, sep, right, direction } => {
if seq.len() < sep.len() {
return None;
}
for lpos in direction.walk(0..=seq.len() - sep.len()) {
let rpos = lpos + sep.len();
let state = vec_match(left, &seq[..lpos], save_loc)
.and_then(|s| Some(s.combine(scalv_match(sep, &seq[lpos..rpos], save_loc)?)))
.and_then(|s| Some(s.combine(vec_match(right, &seq[rpos..], save_loc)?)));
if let Some(s) = state {
return Some(s);
}
}
None
},
// XXX predict heap space usage and allocation count
VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => {
if seq.len() < left_sep.len() + right_sep.len() {
return None;
}
// Valid locations for the left separator
let lposv = seq[..seq.len() - right_sep.len()]
.windows(left_sep.len())
.enumerate()
.filter_map(|(i, window)| scalv_match(left_sep, window, save_loc).map(|s| (i, s)))
.collect::<Vec<_>>();
// Valid locations for the right separator
let rposv = seq[left_sep.len()..]
.windows(right_sep.len())
.enumerate()
.filter_map(|(i, window)| scalv_match(right_sep, window, save_loc).map(|s| (i, s)))
.collect::<Vec<_>>();
// Valid combinations of locations for the separators
let mut pos_pairs = lposv
.into_iter()
.cartesian_product(rposv)
.filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
.map(|((lpos, lstate), (rpos, rstate))| (lpos, rpos, lstate.combine(rstate)))
.collect::<Vec<_>>();
// In descending order of size
pos_pairs.sort_by_key(|(l, r, _)| -((r - l) as i64));
let eql_clusters = pos_pairs.into_iter().chunk_by(|(al, ar, _)| ar - al);
for (_gap_size, cluster) in eql_clusters.into_iter() {
let best_candidate = cluster
.into_iter()
.filter_map(|(lpos, rpos, state)| {
Some(
state
.combine(vec_match(left, &seq[..lpos], save_loc)?)
.combine(vec_match(mid, &seq[lpos + left_sep.len()..rpos], save_loc)?)
.combine(vec_match(right, &seq[rpos + right_sep.len()..], save_loc)?),
)
})
.max_by(|a, b| {
for key in key_order {
let alen = a.ph_len(key).expect("key_order references scalar or missing");
let blen = b.ph_len(key).expect("key_order references scalar or missing");
match alen.cmp(&blen) {
Ordering::Equal => (),
any => return any,
}
}
Ordering::Equal
});
if let Some(state) = best_candidate {
return Some(state);
}
}
None
},
}
}
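
For orientation, the simplest branch above can be exercised directly. The snippet below is a hypothetical sketch, not part of the commit; it assumes a `seq: &[MacTree]` in scope and that `intern` from `orchid_base::interner` yields the `Tok<String>` keys used elsewhere in this diff.

let matcher = VecMatcher::Placeh { key: intern("body"), nonzero: false };
// With `nonzero == false` the placeholder accepts any sequence, even an empty one,
// and binds it wholesale under the key.
let state = vec_match(&matcher, seq, &|_| false).expect("unconditional placeholder");
assert_eq!(state.ph_len(&intern("body")), Some(seq.len()));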

View File

@@ -1,7 +1,7 @@
use std::io::{self, BufRead as _, Write};
use std::path::PathBuf;
use std::sync::mpsc::{sync_channel, SyncSender};
use std::sync::Mutex;
use std::sync::mpsc::{SyncSender, sync_channel};
use std::{process, thread};
use orchid_api_traits::{Decode, Encode};
@@ -12,68 +12,61 @@ use crate::api;
use crate::extension::{ExtensionPort, OnMessage};
pub struct Subprocess {
child: Mutex<process::Child>,
stdin: Mutex<process::ChildStdin>,
set_onmessage: SyncSender<OnMessage>,
header: api::ExtensionHeader,
child: Mutex<process::Child>,
stdin: Mutex<process::ChildStdin>,
set_onmessage: SyncSender<OnMessage>,
header: api::ExtensionHeader,
}
impl Subprocess {
pub fn new(mut cmd: process::Command, logger: Logger) -> io::Result<Self> {
let prog_pbuf = PathBuf::from(cmd.get_program());
let prog = prog_pbuf.file_stem().unwrap_or(cmd.get_program()).to_string_lossy().to_string();
let mut child = cmd
.stdin(process::Stdio::piped())
.stdout(process::Stdio::piped())
.stderr(process::Stdio::piped())
.spawn()?;
let mut stdin = child.stdin.take().unwrap();
api::HostHeader { log_strategy: logger.strat() }.encode(&mut stdin);
stdin.flush()?;
let mut stdout = child.stdout.take().unwrap();
let header = api::ExtensionHeader::decode(&mut stdout);
let child_stderr = child.stderr.take().unwrap();
let (set_onmessage, recv_onmessage) = sync_channel(0);
thread::Builder::new().name(format!("stdout-fwd:{prog}")).spawn(move || {
let mut onmessage: Box<dyn FnMut(&[u8]) + Send> = recv_onmessage.recv().unwrap();
drop(recv_onmessage);
loop {
match recv_msg(&mut stdout) {
Ok(msg) => onmessage(&msg[..]),
Err(e) if e.kind() == io::ErrorKind::BrokenPipe => break,
Err(e) => panic!("Failed to read from stdout: {}, {e}", e.kind()),
}
}
})?;
thread::Builder::new().name(format!("stderr-fwd:{prog}")).spawn(move || {
let mut reader = io::BufReader::new(child_stderr);
loop {
let mut buf = String::new();
if 0 == reader.read_line(&mut buf).unwrap() {
break;
}
logger.log(buf);
}
})?;
Ok(Self {
child: Mutex::new(child),
stdin: Mutex::new(stdin),
set_onmessage,
header,
})
}
pub fn new(mut cmd: process::Command, logger: Logger) -> io::Result<Self> {
let prog_pbuf = PathBuf::from(cmd.get_program());
let prog = prog_pbuf.file_stem().unwrap_or(cmd.get_program()).to_string_lossy().to_string();
let mut child = cmd
.stdin(process::Stdio::piped())
.stdout(process::Stdio::piped())
.stderr(process::Stdio::piped())
.spawn()?;
let mut stdin = child.stdin.take().unwrap();
api::HostHeader { log_strategy: logger.strat() }.encode(&mut stdin);
stdin.flush()?;
let mut stdout = child.stdout.take().unwrap();
let header = api::ExtensionHeader::decode(&mut stdout);
let child_stderr = child.stderr.take().unwrap();
let (set_onmessage, recv_onmessage) = sync_channel(0);
thread::Builder::new().name(format!("stdout-fwd:{prog}")).spawn(move || {
let mut onmessage: Box<dyn FnMut(&[u8]) + Send> = recv_onmessage.recv().unwrap();
drop(recv_onmessage);
loop {
match recv_msg(&mut stdout) {
Ok(msg) => onmessage(&msg[..]),
Err(e) if e.kind() == io::ErrorKind::BrokenPipe => break,
Err(e) => panic!("Failed to read from stdout: {}, {e}", e.kind()),
}
}
})?;
thread::Builder::new().name(format!("stderr-fwd:{prog}")).spawn(move || {
let mut reader = io::BufReader::new(child_stderr);
loop {
let mut buf = String::new();
if 0 == reader.read_line(&mut buf).unwrap() {
break;
}
logger.log(buf);
}
})?;
Ok(Self { child: Mutex::new(child), stdin: Mutex::new(stdin), set_onmessage, header })
}
}
impl Drop for Subprocess {
fn drop(&mut self) { self.child.lock().unwrap().wait().expect("Extension exited with error"); }
fn drop(&mut self) { self.child.lock().unwrap().wait().expect("Extension exited with error"); }
}
impl ExtensionPort for Subprocess {
fn set_onmessage(&self, callback: OnMessage) {
self.set_onmessage.send(callback).unwrap();
}
fn header(&self) -> &orchid_api::ExtensionHeader { &self.header }
fn send(&self, msg: &[u8]) {
if msg.starts_with(&[0, 0, 0, 0x1c]) {
panic!("Received unnecessary prefix");
}
send_msg(&mut *self.stdin.lock().unwrap(), msg).unwrap()
}
fn set_onmessage(&self, callback: OnMessage) { self.set_onmessage.send(callback).unwrap(); }
fn header(&self) -> &orchid_api::ExtensionHeader { &self.header }
fn send(&self, msg: &[u8]) {
if msg.starts_with(&[0, 0, 0, 0x1c]) {
panic!("Received unnecessary prefix");
}
send_msg(&mut *self.stdin.lock().unwrap(), msg).unwrap()
}
}
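
A minimal hypothetical launch, mirroring how `orcx` uses this type later in the same commit; the binary path is a placeholder, and it assumes `Logger`/`LogStrategy` from `orchid_base::logging` are in scope here.

fn launch_extension() -> io::Result<Subprocess> {
	// Spawns the extension, writes the HostHeader and reads the ExtensionHeader as above.
	let logger = Logger::new(LogStrategy::StdErr);
	Subprocess::new(process::Command::new("./my-extension"), logger)
}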

View File

@@ -4,14 +4,14 @@ use std::sync::{Mutex, OnceLock};
use itertools::Itertools;
use never::Never;
use orchid_base::error::OrcRes;
use orchid_base::interner::{intern, Tok};
use orchid_base::interner::{Tok, intern};
use orchid_base::location::Pos;
use orchid_base::macros::mtreev_from_api;
use orchid_base::name::Sym;
use orchid_base::parse::{Comment, Import};
use orchid_base::tree::{TokTree, Token};
use ordered_float::NotNan;
use substack::{with_iter_stack, Substack};
use substack::{Substack, with_iter_stack};
use crate::api;
use crate::expr::Expr;
@@ -23,172 +23,168 @@ pub type ParsTok = Token<'static, AtomHand, Never>;
#[derive(Debug)]
pub struct Item {
pub pos: Pos,
pub comments: Vec<Comment>,
pub kind: ItemKind,
pub pos: Pos,
pub comments: Vec<Comment>,
pub kind: ItemKind,
}
#[derive(Debug)]
pub enum ItemKind {
Member(Member),
Export(Tok<String>),
Import(Import),
Macro(Option<NotNan<f64>>, Vec<Rule>)
Member(Member),
Export(Tok<String>),
Import(Import),
Macro(Option<NotNan<f64>>, Vec<Rule>),
}
impl Item {
pub fn from_api(
tree: api::Item,
path: Substack<Tok<String>>,
sys: &System
) -> Self {
let kind = match tree.kind {
api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys)),
api::ItemKind::Import(i) =>
ItemKind::Import(Import{ path: Sym::from_api(i).iter().collect(), name: None }),
api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e)),
api::ItemKind::Macro(api::MacroBlock { priority, rules }) => ItemKind::Macro(priority, {
Vec::from_iter(rules.into_iter().map(|api| Rule {
pos: Pos::from_api(&api.location),
pattern: mtreev_from_api(&api.pattern, &mut |a| MacTok::Atom(AtomHand::from_api(a.clone()))),
kind: RuleKind::Remote(sys.clone(), api.id),
comments: api.comments.iter().map(Comment::from_api).collect_vec()
}))
})
};
let comments = tree.comments.iter().map(Comment::from_api).collect_vec();
Self { pos: Pos::from_api(&tree.location), comments, kind }
}
pub fn from_api(tree: api::Item, path: Substack<Tok<String>>, sys: &System) -> Self {
let kind = match tree.kind {
api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys)),
api::ItemKind::Import(i) =>
ItemKind::Import(Import { path: Sym::from_api(i).iter().collect(), name: None }),
api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e)),
api::ItemKind::Macro(api::MacroBlock { priority, rules }) => ItemKind::Macro(priority, {
Vec::from_iter(rules.into_iter().map(|api| Rule {
pos: Pos::from_api(&api.location),
pattern: mtreev_from_api(&api.pattern, &mut |a| {
MacTok::Atom(AtomHand::from_api(a.clone()))
}),
kind: RuleKind::Remote(sys.clone(), api.id),
comments: api.comments.iter().map(Comment::from_api).collect_vec(),
}))
}),
};
let comments = tree.comments.iter().map(Comment::from_api).collect_vec();
Self { pos: Pos::from_api(&tree.location), comments, kind }
}
}
#[derive(Debug)]
pub struct Member {
pub name: Tok<String>,
pub kind: OnceLock<MemberKind>,
pub lazy: Mutex<Option<LazyMemberHandle>>,
pub name: Tok<String>,
pub kind: OnceLock<MemberKind>,
pub lazy: Mutex<Option<LazyMemberHandle>>,
}
impl Member {
pub fn from_api(
api: api::Member,
path: Substack<Tok<String>>,
sys: &System,
) -> Self {
let name = Tok::from_api(api.name);
let full_path = path.push(name.clone());
let kind = match api.kind {
api::MemberKind::Lazy(id) =>
return LazyMemberHandle(id, sys.clone(), intern(&full_path.unreverse())).into_member(name),
api::MemberKind::Const(c) => MemberKind::Const(Code::from_expr(
CodeLocator::to_const(full_path.unreverse()),
Expr::from_api(&c, &mut ())
)),
api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, full_path, sys)),
};
Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
}
pub fn new(name: Tok<String>, kind: MemberKind) -> Self {
Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
}
pub fn from_api(api: api::Member, path: Substack<Tok<String>>, sys: &System) -> Self {
let name = Tok::from_api(api.name);
let full_path = path.push(name.clone());
let kind = match api.kind {
api::MemberKind::Lazy(id) => {
return LazyMemberHandle(id, sys.clone(), intern(&full_path.unreverse())).into_member(name);
},
api::MemberKind::Const(c) => MemberKind::Const(Code::from_expr(
CodeLocator::to_const(full_path.unreverse()),
Expr::from_api(&c, &mut ()),
)),
api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, full_path, sys)),
};
Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
}
pub fn new(name: Tok<String>, kind: MemberKind) -> Self {
Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
}
}
#[derive(Debug)]
pub enum MemberKind {
Const(Code),
Mod(Module),
Const(Code),
Mod(Module),
}
#[derive(Debug)]
pub struct Module {
pub imports: Vec<Sym>,
pub exports: Vec<Tok<String>>,
pub items: Vec<Item>,
pub imports: Vec<Sym>,
pub exports: Vec<Tok<String>>,
pub items: Vec<Item>,
}
impl Module {
pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
let items = items.into_iter().collect_vec();
let exports = (items.iter())
.filter_map(|i| match &i.kind {
ItemKind::Export(e) => Some(e.clone()),
_ => None,
})
.collect_vec();
Self { imports: vec![], exports, items }
}
pub fn from_api(m: api::Module, path: Substack<Tok<String>>, sys: &System) -> Self {
let mut output = Vec::new();
for item in m.items.into_iter() {
let next = Item::from_api(item, path.clone(), sys);
output.push(next);
}
Self::new(output)
}
pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
let items = items.into_iter().collect_vec();
let exports = (items.iter())
.filter_map(|i| match &i.kind {
ItemKind::Export(e) => Some(e.clone()),
_ => None,
})
.collect_vec();
Self { imports: vec![], exports, items }
}
pub fn from_api(m: api::Module, path: Substack<Tok<String>>, sys: &System) -> Self {
let mut output = Vec::new();
for item in m.items.into_iter() {
let next = Item::from_api(item, path.clone(), sys);
output.push(next);
}
Self::new(output)
}
}
#[derive(Debug)]
pub struct LazyMemberHandle(api::TreeId, System, Tok<Vec<Tok<String>>>);
impl LazyMemberHandle {
pub fn run(self) -> OrcRes<MemberKind> {
match self.1.get_tree(self.0) {
api::MemberKind::Const(c) => Ok(MemberKind::Const(Code {
bytecode: Expr::from_api(&c, &mut ()).into(),
locator: CodeLocator { steps: self.2, rule_loc: None },
source: None,
})),
api::MemberKind::Module(m) => with_iter_stack(self.2.iter().cloned(), |path| {
Ok(MemberKind::Mod(Module::from_api(m, path, &self.1)))
}),
api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run(),
}
}
pub fn into_member(self, name: Tok<String>) -> Member {
Member { name, kind: OnceLock::new(), lazy: Mutex::new(Some(self)) }
}
pub fn run(self) -> OrcRes<MemberKind> {
match self.1.get_tree(self.0) {
api::MemberKind::Const(c) => Ok(MemberKind::Const(Code {
bytecode: Expr::from_api(&c, &mut ()).into(),
locator: CodeLocator { steps: self.2, rule_loc: None },
source: None,
})),
api::MemberKind::Module(m) => with_iter_stack(self.2.iter().cloned(), |path| {
Ok(MemberKind::Mod(Module::from_api(m, path, &self.1)))
}),
api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run(),
}
}
pub fn into_member(self, name: Tok<String>) -> Member {
Member { name, kind: OnceLock::new(), lazy: Mutex::new(Some(self)) }
}
}
#[derive(Debug)]
pub struct Rule {
pub pos: Pos,
pub comments: Vec<Comment>,
pub pattern: Vec<MacTree>,
pub kind: RuleKind,
pub pos: Pos,
pub comments: Vec<Comment>,
pub pattern: Vec<MacTree>,
pub kind: RuleKind,
}
#[derive(Debug)]
pub enum RuleKind {
Remote(System, api::MacroId),
Native(Code),
Remote(System, api::MacroId),
Native(Code),
}
#[derive(Debug)]
pub struct Code {
locator: CodeLocator,
source: Option<Vec<ParsTokTree>>,
bytecode: OnceLock<Expr>,
locator: CodeLocator,
source: Option<Vec<ParsTokTree>>,
bytecode: OnceLock<Expr>,
}
impl Code {
pub fn from_expr(locator: CodeLocator, expr: Expr) -> Self {
Self { locator, source: None, bytecode: expr.into() }
}
pub fn from_code(locator: CodeLocator, code: Vec<ParsTokTree>) -> Self {
Self { locator, source: Some(code), bytecode: OnceLock::new() }
}
pub fn from_expr(locator: CodeLocator, expr: Expr) -> Self {
Self { locator, source: None, bytecode: expr.into() }
}
pub fn from_code(locator: CodeLocator, code: Vec<ParsTokTree>) -> Self {
Self { locator, source: Some(code), bytecode: OnceLock::new() }
}
}
/// Selects a code element
///
/// Either the steps point to a constant and rule_loc is None, or the steps point to a module and
/// rule_loc selects a macro rule within that module
///
/// Either the steps point to a constant and rule_loc is None, or the steps
/// point to a module and rule_loc selects a macro rule within that module
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct CodeLocator {
steps: Tok<Vec<Tok<String>>>,
/// Index of a macro block in the module demarked by the steps, and a rule in that macro
rule_loc: Option<(u16, u16)>,
steps: Tok<Vec<Tok<String>>>,
/// Index of a macro block in the module demarked by the steps, and a rule in
/// that macro
rule_loc: Option<(u16, u16)>,
}
impl CodeLocator {
pub fn to_const(path: impl IntoIterator<Item = Tok<String>>) -> Self {
Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: None }
}
pub fn to_rule(path: impl IntoIterator<Item = Tok<String>>, macro_i: u16, rule_i: u16) -> Self {
Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: Some((macro_i, rule_i)) }
}
}
pub fn to_const(path: impl IntoIterator<Item = Tok<String>>) -> Self {
Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: None }
}
pub fn to_rule(path: impl IntoIterator<Item = Tok<String>>, macro_i: u16, rule_i: u16) -> Self {
Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: Some((macro_i, rule_i)) }
}
}
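
To make the doc comment above concrete, a short hypothetical sketch of the two constructors, using `intern` from `orchid_base::interner` as this file already does:

// A locator for the constant std::string::concat...
let const_loc = CodeLocator::to_const([intern("std"), intern("string"), intern("concat")]);
// ...and one for the third rule of the first macro block in module std::string.
let rule_loc = CodeLocator::to_rule([intern("std"), intern("string")], 0, 2);
assert_ne!(const_loc, rule_loc); // the derived Eq/Hash make locators usable as map keys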

View File

@@ -4,12 +4,12 @@ version = "0.1.0"
edition = "2021"
[dependencies]
itertools = "0.13.0"
itertools = "0.14.0"
never = "0.1.0"
once_cell = "1.19.0"
once_cell = "1.20.2"
orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" }
orchid-extension = { version = "0.1.0", path = "../orchid-extension" }
ordered-float = "4.2.1"
ordered-float = "4.6.0"

View File

@@ -1,6 +1,6 @@
use orchid_api_derive::Coding;
use orchid_base::error::OrcRes;
use orchid_extension::atom::{AtomFactory, MethodSet, Atomic, AtomicFeatures, ToAtom, TypAtom};
use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, MethodSet, ToAtom, TypAtom};
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::conv::TryFromExpr;
use orchid_extension::expr::Expr;
@@ -9,49 +9,47 @@ use ordered_float::NotNan;
#[derive(Clone, Debug, Coding)]
pub struct Int(pub i64);
impl Atomic for Int {
type Variant = ThinVariant;
type Data = Self;
fn reg_reqs() -> MethodSet<Self> {
MethodSet::new()
}
type Variant = ThinVariant;
type Data = Self;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
}
impl ThinAtom for Int {}
impl TryFromExpr for Int {
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
TypAtom::<Int>::try_from_expr(expr).map(|t| t.value)
}
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
TypAtom::<Int>::try_from_expr(expr).map(|t| t.value)
}
}
#[derive(Clone, Debug, Coding)]
pub struct Float(pub NotNan<f64>);
impl Atomic for Float {
type Variant = ThinVariant;
type Data = Self;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
type Variant = ThinVariant;
type Data = Self;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
}
impl ThinAtom for Float {}
impl TryFromExpr for Float {
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
TypAtom::<Float>::try_from_expr(expr).map(|t| t.value)
}
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
TypAtom::<Float>::try_from_expr(expr).map(|t| t.value)
}
}
pub enum Numeric {
Int(i64),
Float(NotNan<f64>),
Int(i64),
Float(NotNan<f64>),
}
impl TryFromExpr for Numeric {
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Int::try_from_expr(expr.clone())
.map(|t| Numeric::Int(t.0))
.or_else(|e| Float::try_from_expr(expr).map(|t| Numeric::Float(t.0)).map_err(|e2| e + e2))
}
fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Int::try_from_expr(expr.clone())
.map(|t| Numeric::Int(t.0))
.or_else(|e| Float::try_from_expr(expr).map(|t| Numeric::Float(t.0)).map_err(|e2| e + e2))
}
}
impl ToAtom for Numeric {
fn to_atom_factory(self) -> AtomFactory {
match self {
Self::Float(f) => Float(f).factory(),
Self::Int(i) => Int(i).factory(),
}
}
fn to_atom_factory(self) -> AtomFactory {
match self {
Self::Float(f) => Float(f).factory(),
Self::Int(i) => Int(i).factory(),
}
}
}

View File

@@ -1,7 +1,7 @@
use std::ops::RangeInclusive;
use orchid_base::error::OrcRes;
use orchid_base::number::{num_to_err, parse_num, Numeric};
use orchid_base::number::{Numeric, num_to_err, parse_num};
use orchid_extension::atom::AtomicFeatures;
use orchid_extension::lexer::{LexContext, Lexer};
use orchid_extension::tree::{GenTok, GenTokTree};
@@ -12,16 +12,16 @@ use super::num_atom::{Float, Int};
#[derive(Default)]
pub struct NumLexer;
impl Lexer for NumLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
let fac = match parse_num(chars) {
Ok(Numeric::Float(f)) => Float(f).factory(),
Ok(Numeric::Uint(uint)) => Int(uint.try_into().unwrap()).factory(),
Ok(Numeric::Decimal(dec)) => Float(NotNan::new(dec.try_into().unwrap()).unwrap()).factory(),
Err(e) => return Err(num_to_err(e, ctx.pos(all)).into()),
};
Ok((tail, GenTok::X(fac).at(ctx.pos(all)..ctx.pos(tail))))
}
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
let fac = match parse_num(chars) {
Ok(Numeric::Float(f)) => Float(f).factory(),
Ok(Numeric::Uint(uint)) => Int(uint.try_into().unwrap()).factory(),
Ok(Numeric::Decimal(dec)) => Float(NotNan::new(dec.try_into().unwrap()).unwrap()).factory(),
Err(e) => return Err(num_to_err(e, ctx.pos(all)).into()),
};
Ok((tail, GenTok::X(fac).at(ctx.pos(all)..ctx.pos(tail))))
}
}

View File

@@ -7,40 +7,40 @@ use orchid_extension::entrypoint::ExtReq;
use orchid_extension::fs::DeclFs;
use orchid_extension::system::{System, SystemCard};
use orchid_extension::system_ctor::SystemCtor;
use orchid_extension::tree::{comments, fun, module, root_mod, MemKind};
use orchid_extension::tree::{MemKind, comments, fun, module, root_mod};
use crate::OrcString;
use crate::number::num_atom::{Float, Int};
use crate::string::str_atom::{IntStrAtom, StrAtom};
use crate::string::str_lexer::StringLexer;
use crate::OrcString;
#[derive(Default)]
pub struct StdSystem;
impl SystemCtor for StdSystem {
type Deps = ();
type Instance = Self;
const NAME: &'static str = "orchid::std";
const VERSION: f64 = 0.00_01;
fn inst() -> Option<Self::Instance> { Some(StdSystem) }
type Deps = ();
type Instance = Self;
const NAME: &'static str = "orchid::std";
const VERSION: f64 = 0.00_01;
fn inst() -> Option<Self::Instance> { Some(StdSystem) }
}
impl SystemCard for StdSystem {
type Ctor = Self;
type Req = Never;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> {
[Some(Int::dynfo()), Some(Float::dynfo()), Some(StrAtom::dynfo()), Some(IntStrAtom::dynfo())]
}
type Ctor = Self;
type Req = Never;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> {
[Some(Int::dynfo()), Some(Float::dynfo()), Some(StrAtom::dynfo()), Some(IntStrAtom::dynfo())]
}
}
impl System for StdSystem {
fn request(_: ExtReq, req: Self::Req) -> orchid_base::reqnot::Receipt { match req {} }
fn lexers() -> Vec<orchid_extension::lexer::LexerObj> { vec![&StringLexer] }
fn parsers() -> Vec<orchid_extension::parser::ParserObj> { vec![] }
fn vfs() -> DeclFs { DeclFs::Mod(&[]) }
fn env() -> Vec<(Tok<String>, MemKind)> {
vec![root_mod("std", [], [module(true, "string", [], [comments(
["Concatenate two strings"],
fun(true, "concat", |left: OrcString, right: OrcString| {
StrAtom::new(Arc::new(left.get_string().to_string() + &right.get_string()))
}),
)])])]
}
fn request(_: ExtReq, req: Self::Req) -> orchid_base::reqnot::Receipt { match req {} }
fn lexers() -> Vec<orchid_extension::lexer::LexerObj> { vec![&StringLexer] }
fn parsers() -> Vec<orchid_extension::parser::ParserObj> { vec![] }
fn vfs() -> DeclFs { DeclFs::Mod(&[]) }
fn env() -> Vec<(Tok<String>, MemKind)> {
vec![root_mod("std", [], [module(true, "string", [], [comments(
["Concatenate two strings"],
fun(true, "concat", |left: OrcString, right: OrcString| {
StrAtom::new(Arc::new(left.get_string().to_string() + &right.get_string()))
}),
)])])]
}
}

View File

@@ -5,9 +5,9 @@ use std::sync::Arc;
use orchid_api_derive::Coding;
use orchid_api_traits::{Encode, Request};
use orchid_base::error::{mk_errv, OrcRes};
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::intern;
use orchid_base::interner::{intern, Tok};
use orchid_base::interner::{Tok, intern};
use orchid_extension::atom::{AtomMethod, Atomic, MethodSet, Supports, TypAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::conv::TryFromExpr;
@@ -17,83 +17,83 @@ use orchid_extension::system::SysCtx;
#[derive(Copy, Clone, Coding)]
pub struct StringGetVal;
impl Request for StringGetVal {
type Response = Arc<String>;
type Response = Arc<String>;
}
impl AtomMethod for StringGetVal {
const NAME: &str = "std::string_get_val";
const NAME: &str = "std::string_get_val";
}
impl Supports<StringGetVal> for StrAtom {
fn handle(&self, _: SysCtx, _: StringGetVal) -> <StringGetVal as Request>::Response {
self.0.clone()
}
fn handle(&self, _: SysCtx, _: StringGetVal) -> <StringGetVal as Request>::Response {
self.0.clone()
}
}
#[derive(Clone)]
pub struct StrAtom(Arc<String>);
impl Atomic for StrAtom {
type Variant = OwnedVariant;
type Data = ();
fn reg_reqs() -> MethodSet<Self> { MethodSet::new().handle::<StringGetVal>() }
type Variant = OwnedVariant;
type Data = ();
fn reg_reqs() -> MethodSet<Self> { MethodSet::new().handle::<StringGetVal>() }
}
impl StrAtom {
pub fn new(str: Arc<String>) -> Self { Self(str) }
pub fn value(&self) -> Arc<String> { self.0.clone() }
pub fn new(str: Arc<String>) -> Self { Self(str) }
pub fn value(&self) -> Arc<String> { self.0.clone() }
}
impl Deref for StrAtom {
type Target = str;
fn deref(&self) -> &Self::Target { &self.0 }
type Target = str;
fn deref(&self) -> &Self::Target { &self.0 }
}
impl OwnedAtom for StrAtom {
type Refs = ();
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
fn serialize(&self, _: SysCtx, sink: &mut (impl io::Write + ?Sized)) -> Self::Refs {
self.deref().encode(sink)
}
fn deserialize(mut ctx: impl DeserializeCtx, _: Self::Refs) -> Self {
Self::new(Arc::new(ctx.read::<String>()))
}
type Refs = ();
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
fn serialize(&self, _: SysCtx, sink: &mut (impl io::Write + ?Sized)) -> Self::Refs {
self.deref().encode(sink)
}
fn deserialize(mut ctx: impl DeserializeCtx, _: Self::Refs) -> Self {
Self::new(Arc::new(ctx.read::<String>()))
}
}
#[derive(Debug, Clone)]
pub struct IntStrAtom(Tok<String>);
impl Atomic for IntStrAtom {
type Variant = OwnedVariant;
type Data = orchid_api::TStr;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
type Variant = OwnedVariant;
type Data = orchid_api::TStr;
fn reg_reqs() -> MethodSet<Self> { MethodSet::new() }
}
impl From<Tok<String>> for IntStrAtom {
fn from(value: Tok<String>) -> Self { Self(value) }
fn from(value: Tok<String>) -> Self { Self(value) }
}
impl OwnedAtom for IntStrAtom {
type Refs = ();
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.0.to_api()) }
fn print(&self, _ctx: SysCtx) -> String { format!("{:?}i", *self.0) }
fn serialize(&self, _: SysCtx, write: &mut (impl io::Write + ?Sized)) { self.0.encode(write) }
fn deserialize(ctx: impl DeserializeCtx, _: ()) -> Self { Self(intern(&ctx.decode::<String>())) }
type Refs = ();
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.0.to_api()) }
fn print(&self, _ctx: SysCtx) -> String { format!("{:?}i", *self.0) }
fn serialize(&self, _: SysCtx, write: &mut (impl io::Write + ?Sized)) { self.0.encode(write) }
fn deserialize(ctx: impl DeserializeCtx, _: ()) -> Self { Self(intern(&ctx.decode::<String>())) }
}
#[derive(Clone)]
pub enum OrcString<'a> {
Val(TypAtom<'a, StrAtom>),
Int(TypAtom<'a, IntStrAtom>),
Val(TypAtom<'a, StrAtom>),
Int(TypAtom<'a, IntStrAtom>),
}
impl OrcString<'_> {
pub fn get_string(&self) -> Arc<String> {
match &self {
Self::Int(tok) => Tok::from_api(tok.value).arc(),
Self::Val(atom) => atom.request(StringGetVal),
}
}
pub fn get_string(&self) -> Arc<String> {
match &self {
Self::Int(tok) => Tok::from_api(tok.value).arc(),
Self::Val(atom) => atom.request(StringGetVal),
}
}
}
impl TryFromExpr for OrcString<'static> {
fn try_from_expr(expr: Expr) -> OrcRes<OrcString<'static>> {
if let Ok(v) = TypAtom::<StrAtom>::try_from_expr(expr.clone()) {
return Ok(OrcString::Val(v));
}
match TypAtom::<IntStrAtom>::try_from_expr(expr) {
Ok(t) => Ok(OrcString::Int(t)),
Err(e) => Err(mk_errv(intern!(str: "A string was expected"), "", e.pos_iter())),
}
}
fn try_from_expr(expr: Expr) -> OrcRes<OrcString<'static>> {
if let Ok(v) = TypAtom::<StrAtom>::try_from_expr(expr.clone()) {
return Ok(OrcString::Val(v));
}
match TypAtom::<IntStrAtom>::try_from_expr(expr) {
Ok(t) => Ok(OrcString::Int(t)),
Err(e) => Err(mk_errv(intern!(str: "A string was expected"), "", e.pos_iter())),
}
}
}
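
As a small illustration that is not part of the commit: `StrAtom` wraps a shared string, so construction, `Deref` and `value` round-trip without copying the text.

let s = StrAtom::new(Arc::new("orchid".to_string()));
assert_eq!(&*s, "orchid");        // Deref exposes the underlying str
assert_eq!(*s.value(), "orchid"); // value() clones the Arc, not the String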

View File

@@ -1,11 +1,11 @@
use itertools::Itertools;
use orchid_base::error::{mk_err, mk_errv, OrcErr, OrcRes};
use orchid_base::error::{OrcErr, OrcRes, mk_err, mk_errv};
use orchid_base::interner::intern;
use orchid_base::location::Pos;
use orchid_base::tree::{vname_tv, wrap_tokv};
use orchid_base::{intern, vname};
use orchid_extension::atom::AtomicFeatures;
use orchid_extension::lexer::{err_not_applicable, LexContext, Lexer};
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::tree::{GenTok, GenTokTree};
use super::str_atom::IntStrAtom;
@@ -13,126 +13,126 @@ use super::str_atom::IntStrAtom;
/// Reasons why [parse_string] might fail. See [StringError]
#[derive(Clone)]
enum StringErrorKind {
/// A unicode escape sequence wasn't followed by 4 hex digits
NotHex,
/// A unicode escape sequence contained an unassigned code point
BadCodePoint,
/// An unrecognized escape sequence was found
BadEscSeq,
/// A unicode escape sequence wasn't followed by 4 hex digits
NotHex,
/// A unicode escape sequence contained an unassigned code point
BadCodePoint,
/// An unrecognized escape sequence was found
BadEscSeq,
}
/// Error produced by [parse_string]
#[derive(Clone)]
struct StringError {
/// Character where the error occurred
pos: u32,
/// Reason for the error
kind: StringErrorKind,
/// Character where the error occurred
pos: u32,
/// Reason for the error
kind: StringErrorKind,
}
impl StringError {
/// Convert into project error for reporting
pub fn into_proj(self, pos: u32) -> OrcErr {
let start = pos + self.pos;
mk_err(
intern!(str: "Failed to parse string"),
match self.kind {
StringErrorKind::NotHex => "Expected a hex digit",
StringErrorKind::BadCodePoint => "The specified number is not a Unicode code point",
StringErrorKind::BadEscSeq => "Unrecognized escape sequence",
},
[Pos::Range(start..start + 1).into()],
)
}
/// Convert into project error for reporting
pub fn into_proj(self, pos: u32) -> OrcErr {
let start = pos + self.pos;
mk_err(
intern!(str: "Failed to parse string"),
match self.kind {
StringErrorKind::NotHex => "Expected a hex digit",
StringErrorKind::BadCodePoint => "The specified number is not a Unicode code point",
StringErrorKind::BadEscSeq => "Unrecognized escape sequence",
},
[Pos::Range(start..start + 1).into()],
)
}
}
/// Process escape sequences in a string literal
fn parse_string(str: &str) -> Result<String, StringError> {
let mut target = String::new();
let mut iter = str.char_indices().map(|(i, c)| (i as u32, c));
while let Some((_, c)) = iter.next() {
if c != '\\' {
target.push(c);
continue;
}
let (mut pos, code) = iter.next().expect("lexer would have continued");
let next = match code {
c @ ('\\' | '"' | '$') => c,
'b' => '\x08',
'f' => '\x0f',
'n' => '\n',
'r' => '\r',
't' => '\t',
'\n' => 'skipws: loop {
match iter.next() {
None => return Ok(target),
Some((_, c)) =>
if !c.is_whitespace() {
break 'skipws c;
},
}
},
'u' => {
let acc = ((0..4).rev())
.map(|radical| {
let (j, c) = (iter.next()).ok_or(StringError { pos, kind: StringErrorKind::NotHex })?;
pos = j;
let b = u32::from_str_radix(&String::from(c), 16)
.map_err(|_| StringError { pos, kind: StringErrorKind::NotHex })?;
Ok(16u32.pow(radical) + b)
})
.fold_ok(0, u32::wrapping_add)?;
char::from_u32(acc).ok_or(StringError { pos, kind: StringErrorKind::BadCodePoint })?
},
_ => return Err(StringError { pos, kind: StringErrorKind::BadEscSeq }),
};
target.push(next);
}
Ok(target)
let mut target = String::new();
let mut iter = str.char_indices().map(|(i, c)| (i as u32, c));
while let Some((_, c)) = iter.next() {
if c != '\\' {
target.push(c);
continue;
}
let (mut pos, code) = iter.next().expect("lexer would have continued");
let next = match code {
c @ ('\\' | '"' | '$') => c,
'b' => '\x08',
'f' => '\x0f',
'n' => '\n',
'r' => '\r',
't' => '\t',
'\n' => 'skipws: loop {
match iter.next() {
None => return Ok(target),
Some((_, c)) =>
if !c.is_whitespace() {
break 'skipws c;
},
}
},
'u' => {
let acc = ((0..4).rev())
.map(|radical| {
let (j, c) = (iter.next()).ok_or(StringError { pos, kind: StringErrorKind::NotHex })?;
pos = j;
let b = u32::from_str_radix(&String::from(c), 16)
.map_err(|_| StringError { pos, kind: StringErrorKind::NotHex })?;
Ok(16u32.pow(radical) + b)
})
.fold_ok(0, u32::wrapping_add)?;
char::from_u32(acc).ok_or(StringError { pos, kind: StringErrorKind::BadCodePoint })?
},
_ => return Err(StringError { pos, kind: StringErrorKind::BadEscSeq }),
};
target.push(next);
}
Ok(target)
}
#[derive(Default)]
pub struct StringLexer;
impl Lexer for StringLexer {
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"'];
fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
let mut tail = all.strip_prefix('"').ok_or_else(err_not_applicable)?;
let mut ret = GenTok::X(IntStrAtom::from(intern!(str: "")).factory()).at(ctx.tok_ran(0, all));
let mut cur = String::new();
let mut errors = vec![];
let str_to_gen = |str: &mut String, tail: &str, err: &mut Vec<OrcErr>| {
let str_val = parse_string(&str.split_off(0))
.inspect_err(|e| err.push(e.clone().into_proj(ctx.pos(tail) - str.len() as u32)))
.unwrap_or_default();
GenTok::X(IntStrAtom::from(intern(&*str_val)).factory())
.at(ctx.tok_ran(str.len() as u32, tail))
};
let add_frag = |prev: GenTokTree<'a>, new: GenTokTree<'a>| {
wrap_tokv(vname_tv(&vname!(std::string::concat), new.range.end).chain([prev, new]))
};
loop {
if let Some(rest) = tail.strip_prefix('"') {
return Ok((rest, add_frag(ret, str_to_gen(&mut cur, tail, &mut errors))));
} else if let Some(rest) = tail.strip_prefix('$') {
ret = add_frag(ret, str_to_gen(&mut cur, tail, &mut errors));
let (new_tail, tree) = ctx.recurse(rest)?;
tail = new_tail;
ret = add_frag(ret, tree);
} else if tail.starts_with('\\') {
// parse_string will deal with it, we just have to skip the next char
tail = &tail[2..];
} else {
let mut ch = tail.chars();
if let Some(c) = ch.next() {
cur.push(c);
tail = ch.as_str();
} else {
let range = ctx.pos(all)..ctx.pos("");
return Err(mk_errv(intern!(str: "No string end"), "String never terminated with \"", [
Pos::Range(range.clone()).into(),
]));
}
}
}
}
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"'];
fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
let mut tail = all.strip_prefix('"').ok_or_else(err_not_applicable)?;
let mut ret = GenTok::X(IntStrAtom::from(intern!(str: "")).factory()).at(ctx.tok_ran(0, all));
let mut cur = String::new();
let mut errors = vec![];
let str_to_gen = |str: &mut String, tail: &str, err: &mut Vec<OrcErr>| {
let str_val = parse_string(&str.split_off(0))
.inspect_err(|e| err.push(e.clone().into_proj(ctx.pos(tail) - str.len() as u32)))
.unwrap_or_default();
GenTok::X(IntStrAtom::from(intern(&*str_val)).factory())
.at(ctx.tok_ran(str.len() as u32, tail))
};
let add_frag = |prev: GenTokTree<'a>, new: GenTokTree<'a>| {
wrap_tokv(vname_tv(&vname!(std::string::concat), new.range.end).chain([prev, new]))
};
loop {
if let Some(rest) = tail.strip_prefix('"') {
return Ok((rest, add_frag(ret, str_to_gen(&mut cur, tail, &mut errors))));
} else if let Some(rest) = tail.strip_prefix('$') {
ret = add_frag(ret, str_to_gen(&mut cur, tail, &mut errors));
let (new_tail, tree) = ctx.recurse(rest)?;
tail = new_tail;
ret = add_frag(ret, tree);
} else if tail.starts_with('\\') {
// parse_string will deal with it, we just have to skip the next char
tail = &tail[2..];
} else {
let mut ch = tail.chars();
if let Some(c) = ch.next() {
cur.push(c);
tail = ch.as_str();
} else {
let range = ctx.pos(all)..ctx.pos("");
return Err(mk_errv(intern!(str: "No string end"), "String never terminated with \"", [
Pos::Range(range.clone()).into(),
]));
}
}
}
}
}
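
The escape handling above is easiest to pin down with a couple of hypothetical assertions; since `parse_string` and `StringError` are private to this module, these would live in a `#[cfg(test)]` block next to it (they are not part of the commit).

assert_eq!(parse_string(r"plain").unwrap(), "plain");
assert_eq!(parse_string(r"a\nb\\c").unwrap(), "a\nb\\c");
// An unrecognized escape is reported with its position and BadEscSeq.
assert!(matches!(parse_string(r"\q"), Err(StringError { kind: StringErrorKind::BadEscSeq, .. })));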

View File

@@ -23,9 +23,9 @@
"editor.glyphMargin": false,
"editor.rulers": [],
"editor.guides.indentation": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
},
"editor.formatOnSave": true,
"rust-analyzer.showUnlinkedFileNotification": false,
"rust-analyzer.checkOnSave": true,
"rust-analyzer.check.command": "clippy",

View File

@@ -6,8 +6,8 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
camino = "1.1.7"
clap = { version = "=4.5.4", features = ["derive"] }
itertools = "0.13.0"
camino = "1.1.9"
clap = { version = "4.5.24", features = ["derive"] }
itertools = "0.14.0"
orchid-base = { version = "0.1.0", path = "../orchid-base" }
orchid-host = { version = "0.1.0", path = "../orchid-host" }

View File

@@ -9,44 +9,44 @@ use itertools::Itertools;
use orchid_base::interner::intern;
use orchid_base::logging::{LogStrategy, Logger};
use orchid_base::tree::ttv_fmt;
use orchid_host::extension::{init_systems, Extension};
use orchid_host::extension::{Extension, init_systems};
use orchid_host::lex::lex;
use orchid_host::subprocess::Subprocess;
#[derive(Parser, Debug)]
#[command(version, about, long_about)]
pub struct Args {
#[arg(short, long)]
extension: Vec<Utf8PathBuf>,
#[arg(short, long)]
system: Vec<String>,
#[command(subcommand)]
command: Commands,
#[arg(short, long)]
extension: Vec<Utf8PathBuf>,
#[arg(short, long)]
system: Vec<String>,
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand, Debug)]
pub enum Commands {
Lex {
#[arg(short, long)]
file: Utf8PathBuf,
},
Lex {
#[arg(short, long)]
file: Utf8PathBuf,
},
}
fn main() {
let args = Args::parse();
let logger = Logger::new(LogStrategy::StdErr);
match args.command {
Commands::Lex { file } => {
let extensions = (args.extension.iter())
.map(|f| Subprocess::new(Command::new(f.as_os_str()), logger.clone()).unwrap())
.map(|cmd| Extension::new_process(Arc::new(cmd), logger.clone()).unwrap())
.collect_vec();
let systems = init_systems(&args.system, &extensions).unwrap();
let mut file = File::open(file.as_std_path()).unwrap();
let mut buf = String::new();
file.read_to_string(&mut buf).unwrap();
let lexemes = lex(intern(&buf), &systems).unwrap();
println!("{}", ttv_fmt(&lexemes))
},
}
let args = Args::parse();
let logger = Logger::new(LogStrategy::StdErr);
match args.command {
Commands::Lex { file } => {
let extensions = (args.extension.iter())
.map(|f| Subprocess::new(Command::new(f.as_os_str()), logger.clone()).unwrap())
.map(|cmd| Extension::new_process(Arc::new(cmd), logger.clone()).unwrap())
.collect_vec();
let systems = init_systems(&args.system, &extensions).unwrap();
let mut file = File::open(file.as_std_path()).unwrap();
let mut buf = String::new();
file.read_to_string(&mut buf).unwrap();
let lexemes = lex(intern(&buf), &systems).unwrap();
println!("{}", ttv_fmt(&lexemes))
},
}
}

View File

@@ -1,16 +1,20 @@
# meta
format_code_in_doc_comments = true
unstable_features = true
version = "Two"
style_edition = "2024"
# space
tab_spaces = 2
hard_tabs = true
max_width = 100
error_on_line_overflow = true
error_on_unformatted = true
format_macro_matchers = true
newline_style = "Unix"
normalize_comments = true
wrap_comments = true
comment_width = 80
doc_comment_code_block_width = 80
overflow_delimited_expr = true
use_small_heuristics = "Max"
fn_single_line = true

View File

@@ -1,45 +1,45 @@
use std::env::{self, args};
use std::io::{stdin, BufRead, BufReader, Write};
use std::io::{BufRead, BufReader, Write, stdin};
use std::process;
use std::time::SystemTime;
fn main() {
let is_child = env::args().any(|arg| arg == "child");
if is_child {
loop {
let mut input = String::new();
stdin().read_line(&mut input).unwrap();
if input == "ping\n" {
println!("pong");
} else if input == "\n" {
process::exit(0);
} else {
panic!("Unrecognized input {input:?}");
}
}
} else {
let steps = 1_000_000;
let mut child = process::Command::new(args().next().unwrap())
.arg("child")
.stdin(process::Stdio::piped())
.stdout(process::Stdio::piped())
.spawn()
.unwrap();
let mut bufr = BufReader::new(child.stdout.take().unwrap());
let mut child_stdin = child.stdin.take().unwrap();
let time = SystemTime::now();
for _ in 0..steps {
writeln!(child_stdin, "ping").unwrap();
let mut buf = String::new();
bufr.read_line(&mut buf).unwrap();
if buf != "pong\n" {
panic!("Unrecognized output {buf:?}")
}
}
writeln!(child_stdin).unwrap();
child.wait().unwrap();
let elapsed = time.elapsed().unwrap();
let avg = elapsed / steps;
println!("A roundtrip takes {avg:?}, {}ms on average", (avg.as_nanos() as f64) / 1_000_000f64);
}
let is_child = env::args().any(|arg| arg == "child");
if is_child {
loop {
let mut input = String::new();
stdin().read_line(&mut input).unwrap();
if input == "ping\n" {
println!("pong");
} else if input == "\n" {
process::exit(0);
} else {
panic!("Unrecognized input {input:?}");
}
}
} else {
let steps = 1_000_000;
let mut child = process::Command::new(args().next().unwrap())
.arg("child")
.stdin(process::Stdio::piped())
.stdout(process::Stdio::piped())
.spawn()
.unwrap();
let mut bufr = BufReader::new(child.stdout.take().unwrap());
let mut child_stdin = child.stdin.take().unwrap();
let time = SystemTime::now();
for _ in 0..steps {
writeln!(child_stdin, "ping").unwrap();
let mut buf = String::new();
bufr.read_line(&mut buf).unwrap();
if buf != "pong\n" {
panic!("Unrecognized output {buf:?}")
}
}
writeln!(child_stdin).unwrap();
child.wait().unwrap();
let elapsed = time.elapsed().unwrap();
let avg = elapsed / steps;
println!("A roundtrip takes {avg:?}, {}ms on average", (avg.as_nanos() as f64) / 1_000_000f64);
}
}

View File

@@ -4,4 +4,4 @@ version = "0.1.0"
edition = "2021"
[dependencies]
clap = { version = "=4.5.4", features = ["derive"] }
clap = { version = "4.5.24", features = ["derive"] }

View File

@@ -10,59 +10,59 @@ use clap::{Parser, Subcommand};
#[derive(Parser)]
pub struct Args {
#[arg(short, long)]
verbose: bool,
#[command(subcommand)]
command: Commands,
#[arg(short, long)]
verbose: bool,
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand)]
pub enum Commands {
CheckApiRefs,
CheckApiRefs,
}
pub static EXIT_OK: AtomicBool = AtomicBool::new(true);
fn main() -> io::Result<ExitCode> {
let args = Args::parse();
match args.command {
Commands::CheckApiRefs => walk_wsp(&mut |_| Ok(true), &mut |file| {
if file.path().extension() == Some(OsStr::new("rs")) && file.file_name() != "lib.rs" {
let mut contents = String::new();
File::open(file.path())?.read_to_string(&mut contents)?;
for (l, line) in contents.lines().enumerate() {
if line.trim().starts_with("use") {
if let Some(c) = line.find("orchid_api") {
if Some(c) != line.find("orchid_api_") {
let dname = file.path().to_string_lossy().to_string();
eprintln!("orchid_api imported in {dname} at {};{}", l + 1, c + 1)
}
}
}
}
}
Ok(())
})?,
}
Ok(if EXIT_OK.load(Ordering::Relaxed) { ExitCode::SUCCESS } else { ExitCode::FAILURE })
let args = Args::parse();
match args.command {
Commands::CheckApiRefs => walk_wsp(&mut |_| Ok(true), &mut |file| {
if file.path().extension() == Some(OsStr::new("rs")) && file.file_name() != "lib.rs" {
let mut contents = String::new();
File::open(file.path())?.read_to_string(&mut contents)?;
for (l, line) in contents.lines().enumerate() {
if line.trim().starts_with("use") {
if let Some(c) = line.find("orchid_api") {
if Some(c) != line.find("orchid_api_") {
let dname = file.path().to_string_lossy().to_string();
eprintln!("orchid_api imported in {dname} at {};{}", l + 1, c + 1)
}
}
}
}
}
Ok(())
})?,
}
Ok(if EXIT_OK.load(Ordering::Relaxed) { ExitCode::SUCCESS } else { ExitCode::FAILURE })
}
fn walk_wsp(
dir_filter: &mut impl FnMut(&DirEntry) -> io::Result<bool>,
file_handler: &mut impl FnMut(DirEntry) -> io::Result<()>,
dir_filter: &mut impl FnMut(&DirEntry) -> io::Result<bool>,
file_handler: &mut impl FnMut(DirEntry) -> io::Result<()>,
) -> io::Result<()> {
return recurse(&env::current_dir()?, dir_filter, file_handler);
fn recurse(
dir: &Path,
dir_filter: &mut impl FnMut(&DirEntry) -> io::Result<bool>,
file_handler: &mut impl FnMut(DirEntry) -> io::Result<()>,
) -> io::Result<()> {
for file in dir.read_dir()?.collect::<Result<Vec<_>, _>>()? {
if file.metadata()?.is_dir() && dir_filter(&file)? {
recurse(&file.path(), dir_filter, file_handler)?;
}
file_handler(file)?;
}
Ok(())
}
return recurse(&env::current_dir()?, dir_filter, file_handler);
fn recurse(
dir: &Path,
dir_filter: &mut impl FnMut(&DirEntry) -> io::Result<bool>,
file_handler: &mut impl FnMut(DirEntry) -> io::Result<()>,
) -> io::Result<()> {
for file in dir.read_dir()?.collect::<Result<Vec<_>, _>>()? {
if file.metadata()?.is_dir() && dir_filter(&file)? {
recurse(&file.path(), dir_filter, file_handler)?;
}
file_handler(file)?;
}
Ok(())
}
}
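
Since `walk_wsp` is generic over its two callbacks, other checks can reuse it. A hypothetical example that counts Rust sources, assuming it runs inside a function returning `io::Result`:

let mut rs_files = 0usize;
walk_wsp(&mut |_| Ok(true), &mut |file| {
	// Count every *.rs file reachable from the workspace root.
	if file.path().extension() == Some(OsStr::new("rs")) {
		rs_files += 1;
	}
	Ok(())
})?;
eprintln!("{rs_files} Rust files in the workspace");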