Compare commits
1 Commits
603efef28e
...
ctx-refact
| Author | SHA1 | Date | |
|---|---|---|---|
| cd1d640174 |
44
Cargo.lock
generated
44
Cargo.lock
generated
@@ -619,17 +619,6 @@ dependencies = [
|
|||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "futures-locks"
|
|
||||||
version = "0.7.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06"
|
|
||||||
dependencies = [
|
|
||||||
"futures-channel",
|
|
||||||
"futures-task",
|
|
||||||
"tokio",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-macro"
|
name = "futures-macro"
|
||||||
version = "0.3.31"
|
version = "0.3.31"
|
||||||
@@ -809,9 +798,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "konst"
|
name = "konst"
|
||||||
version = "0.4.2"
|
version = "0.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "64896bdfd7906cfb0b57bc04f08bde408bcd6aaf71ff438ee471061cd16f2e86"
|
checksum = "e1b7495a4af30134f36ab2018716ba98b092019a6c5dc2126b94e3241c170748"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"const_panic",
|
"const_panic",
|
||||||
"konst_proc_macros",
|
"konst_proc_macros",
|
||||||
@@ -980,7 +969,6 @@ name = "orchid-api"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures",
|
"futures",
|
||||||
"itertools",
|
|
||||||
"orchid-api-derive",
|
"orchid-api-derive",
|
||||||
"orchid-api-traits",
|
"orchid-api-traits",
|
||||||
"ordered-float",
|
"ordered-float",
|
||||||
@@ -1040,11 +1028,11 @@ name = "orchid-extension"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-fn-stream",
|
"async-fn-stream",
|
||||||
|
"async-lock",
|
||||||
"async-once-cell",
|
"async-once-cell",
|
||||||
"derive_destructure",
|
"derive_destructure",
|
||||||
"dyn-clone",
|
"dyn-clone",
|
||||||
"futures",
|
"futures",
|
||||||
"futures-locks",
|
|
||||||
"hashbrown 0.16.0",
|
"hashbrown 0.16.0",
|
||||||
"include_dir",
|
"include_dir",
|
||||||
"itertools",
|
"itertools",
|
||||||
@@ -1061,7 +1049,6 @@ dependencies = [
|
|||||||
"pastey",
|
"pastey",
|
||||||
"some_executor",
|
"some_executor",
|
||||||
"substack",
|
"substack",
|
||||||
"task-local",
|
|
||||||
"tokio",
|
"tokio",
|
||||||
"tokio-util",
|
"tokio-util",
|
||||||
"trait-set",
|
"trait-set",
|
||||||
@@ -1072,12 +1059,12 @@ name = "orchid-host"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-fn-stream",
|
"async-fn-stream",
|
||||||
|
"async-lock",
|
||||||
"async-once-cell",
|
"async-once-cell",
|
||||||
"async-process",
|
"async-process",
|
||||||
"bound",
|
"bound",
|
||||||
"derive_destructure",
|
"derive_destructure",
|
||||||
"futures",
|
"futures",
|
||||||
"futures-locks",
|
|
||||||
"hashbrown 0.16.0",
|
"hashbrown 0.16.0",
|
||||||
"itertools",
|
"itertools",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
@@ -1098,7 +1085,6 @@ dependencies = [
|
|||||||
name = "orchid-std"
|
name = "orchid-std"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-fn-stream",
|
|
||||||
"async-once-cell",
|
"async-once-cell",
|
||||||
"futures",
|
"futures",
|
||||||
"hashbrown 0.16.0",
|
"hashbrown 0.16.0",
|
||||||
@@ -1111,9 +1097,7 @@ dependencies = [
|
|||||||
"orchid-base",
|
"orchid-base",
|
||||||
"orchid-extension",
|
"orchid-extension",
|
||||||
"ordered-float",
|
"ordered-float",
|
||||||
"pastey",
|
|
||||||
"rust_decimal",
|
"rust_decimal",
|
||||||
"subslice-offset",
|
|
||||||
"substack",
|
"substack",
|
||||||
"test_executors",
|
"test_executors",
|
||||||
"tokio",
|
"tokio",
|
||||||
@@ -1129,7 +1113,6 @@ dependencies = [
|
|||||||
"ctrlc",
|
"ctrlc",
|
||||||
"futures",
|
"futures",
|
||||||
"itertools",
|
"itertools",
|
||||||
"orchid-api",
|
|
||||||
"orchid-base",
|
"orchid-base",
|
||||||
"orchid-host",
|
"orchid-host",
|
||||||
"substack",
|
"substack",
|
||||||
@@ -1446,9 +1429,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rust_decimal"
|
name = "rust_decimal"
|
||||||
version = "1.38.0"
|
version = "1.37.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c8975fc98059f365204d635119cf9c5a60ae67b841ed49b5422a9a7e56cdfac0"
|
checksum = "b203a6425500a03e0919c42d3c47caca51e79f1132046626d2c8871c5092035d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrayvec",
|
"arrayvec",
|
||||||
"borsh",
|
"borsh",
|
||||||
@@ -1635,12 +1618,6 @@ version = "0.11.1"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "subslice-offset"
|
|
||||||
version = "0.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c883fb2521558a8be70f0f1922babf736f9f72dfbe6ae4f397de3aefb74627ec"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "substack"
|
name = "substack"
|
||||||
version = "1.1.1"
|
version = "1.1.1"
|
||||||
@@ -1686,15 +1663,6 @@ version = "1.0.1"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
|
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "task-local"
|
|
||||||
version = "0.1.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b2c821daee0efdf6414970c8185a1c22e259a7ed87b2fd9f7d3c5f5503fd2863"
|
|
||||||
dependencies = [
|
|
||||||
"pin-project-lite",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "test_executors"
|
name = "test_executors"
|
||||||
version = "0.3.5"
|
version = "0.3.5"
|
||||||
|
|||||||
674
LICENSE
Normal file
674
LICENSE
Normal file
@@ -0,0 +1,674 @@
|
|||||||
|
GNU GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 29 June 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
the GNU General Public License is intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users. We, the Free Software Foundation, use the
|
||||||
|
GNU General Public License for most of our software; it applies also to
|
||||||
|
any other work released this way by its authors. You can apply it to
|
||||||
|
your programs, too.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
To protect your rights, we need to prevent others from denying you
|
||||||
|
these rights or asking you to surrender the rights. Therefore, you have
|
||||||
|
certain responsibilities if you distribute copies of the software, or if
|
||||||
|
you modify it: responsibilities to respect the freedom of others.
|
||||||
|
|
||||||
|
For example, if you distribute copies of such a program, whether
|
||||||
|
gratis or for a fee, you must pass on to the recipients the same
|
||||||
|
freedoms that you received. You must make sure that they, too, receive
|
||||||
|
or can get the source code. And you must show them these terms so they
|
||||||
|
know their rights.
|
||||||
|
|
||||||
|
Developers that use the GNU GPL protect your rights with two steps:
|
||||||
|
(1) assert copyright on the software, and (2) offer you this License
|
||||||
|
giving you legal permission to copy, distribute and/or modify it.
|
||||||
|
|
||||||
|
For the developers' and authors' protection, the GPL clearly explains
|
||||||
|
that there is no warranty for this free software. For both users' and
|
||||||
|
authors' sake, the GPL requires that modified versions be marked as
|
||||||
|
changed, so that their problems will not be attributed erroneously to
|
||||||
|
authors of previous versions.
|
||||||
|
|
||||||
|
Some devices are designed to deny users access to install or run
|
||||||
|
modified versions of the software inside them, although the manufacturer
|
||||||
|
can do so. This is fundamentally incompatible with the aim of
|
||||||
|
protecting users' freedom to change the software. The systematic
|
||||||
|
pattern of such abuse occurs in the area of products for individuals to
|
||||||
|
use, which is precisely where it is most unacceptable. Therefore, we
|
||||||
|
have designed this version of the GPL to prohibit the practice for those
|
||||||
|
products. If such problems arise substantially in other domains, we
|
||||||
|
stand ready to extend this provision to those domains in future versions
|
||||||
|
of the GPL, as needed to protect the freedom of users.
|
||||||
|
|
||||||
|
Finally, every program is threatened constantly by software patents.
|
||||||
|
States should not allow patents to restrict development and use of
|
||||||
|
software on general-purpose computers, but in those that do, we wish to
|
||||||
|
avoid the special danger that patents applied to a free program could
|
||||||
|
make it effectively proprietary. To prevent this, the GPL assures that
|
||||||
|
patents cannot be used to render the program non-free.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Use with the GNU Affero General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU Affero General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the special requirements of the GNU Affero General Public License,
|
||||||
|
section 13, concerning interaction through a network will apply to the
|
||||||
|
combination as such.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU General Public License from time to time. Such new versions will
|
||||||
|
be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest
|
||||||
|
possible use to the public, the best way to achieve this is to make it
|
||||||
|
free software which everyone can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest
|
||||||
|
to attach them to the start of each source file to most effectively
|
||||||
|
state the exclusion of warranty; and each file should have at least
|
||||||
|
the "copyright" line and a pointer to where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If the program does terminal interaction, make it output a short
|
||||||
|
notice like this when it starts in an interactive mode:
|
||||||
|
|
||||||
|
<program> Copyright (C) <year> <name of author>
|
||||||
|
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||||
|
This is free software, and you are welcome to redistribute it
|
||||||
|
under certain conditions; type `show c' for details.
|
||||||
|
|
||||||
|
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||||
|
parts of the General Public License. Of course, your program's commands
|
||||||
|
might be different; for a GUI interface, you would use an "about box".
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school,
|
||||||
|
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||||
|
For more information on this, and how to apply and follow the GNU GPL, see
|
||||||
|
<https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
The GNU General Public License does not permit incorporating your program
|
||||||
|
into proprietary programs. If your program is a subroutine library, you
|
||||||
|
may consider it more useful to permit linking proprietary applications with
|
||||||
|
the library. If this is what you want to do, use the GNU Lesser General
|
||||||
|
Public License instead of this License. But first, please read
|
||||||
|
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||||
@@ -1,44 +1,114 @@
|
|||||||
|
use std::cell::Cell;
|
||||||
|
use std::future::poll_fn;
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
|
use std::pin::Pin;
|
||||||
|
use std::ptr;
|
||||||
|
use std::task::{Context, Poll};
|
||||||
|
|
||||||
use futures::channel::mpsc;
|
use futures::future::LocalBoxFuture;
|
||||||
use futures::stream::{PollNext, select_with_strategy};
|
use futures::{FutureExt, Stream};
|
||||||
use futures::{FutureExt, SinkExt, Stream, StreamExt};
|
|
||||||
|
type YieldSlot<'a, T> = &'a Cell<Option<T>>;
|
||||||
|
|
||||||
/// Handle that allows you to emit values on a stream. If you drop
|
/// Handle that allows you to emit values on a stream. If you drop
|
||||||
/// this, the stream will end and you will not be polled again.
|
/// this, the stream will end and you will not be polled again.
|
||||||
pub struct StreamCtx<'a, T>(mpsc::Sender<T>, PhantomData<&'a ()>);
|
pub struct StreamCtx<'a, T>(&'a Cell<Option<T>>, PhantomData<&'a ()>);
|
||||||
impl<T> StreamCtx<'_, T> {
|
impl<T> StreamCtx<'_, T> {
|
||||||
pub async fn emit(&mut self, value: T) {
|
pub fn emit(&mut self, value: T) -> impl Future<Output = ()> {
|
||||||
(self.0.send(value).await)
|
assert!(self.0.replace(Some(value)).is_none(), "Leftover value in stream");
|
||||||
.expect("Dropped a stream receiver without dropping the driving closure");
|
let mut state = Poll::Pending;
|
||||||
|
poll_fn(move |_| std::mem::replace(&mut state, Poll::Ready(())))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn left_strat(_: &mut ()) -> PollNext { PollNext::Left }
|
enum FnOrFut<'a, T, O> {
|
||||||
|
Fn(Option<Box<dyn FnOnce(YieldSlot<'a, T>) -> LocalBoxFuture<'a, O> + 'a>>),
|
||||||
|
Fut(LocalBoxFuture<'a, O>),
|
||||||
|
}
|
||||||
|
|
||||||
|
struct AsyncFnStream<'a, T> {
|
||||||
|
driver: FnOrFut<'a, T, ()>,
|
||||||
|
output: Cell<Option<T>>,
|
||||||
|
}
|
||||||
|
impl<'a, T> Stream for AsyncFnStream<'a, T> {
|
||||||
|
type Item = T;
|
||||||
|
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||||
|
unsafe {
|
||||||
|
let self_mut = self.get_unchecked_mut();
|
||||||
|
let fut = match &mut self_mut.driver {
|
||||||
|
FnOrFut::Fut(fut) => fut,
|
||||||
|
FnOrFut::Fn(f) => {
|
||||||
|
// safety: the cell is held inline in self, which is pinned.
|
||||||
|
let cell = ptr::from_ref(&self_mut.output).as_ref().unwrap();
|
||||||
|
let fut = f.take().unwrap()(cell);
|
||||||
|
self_mut.driver = FnOrFut::Fut(fut);
|
||||||
|
return Pin::new_unchecked(self_mut).poll_next(cx);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
match fut.as_mut().poll(cx) {
|
||||||
|
Poll::Ready(()) => Poll::Ready(None),
|
||||||
|
Poll::Pending => match self_mut.output.replace(None) {
|
||||||
|
None => Poll::Pending,
|
||||||
|
Some(t) => Poll::Ready(Some(t)),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct AsyncFnTryStream<'a, T, E> {
|
||||||
|
driver: FnOrFut<'a, T, Result<StreamCtx<'a, T>, E>>,
|
||||||
|
output: Cell<Option<T>>,
|
||||||
|
}
|
||||||
|
impl<'a, T, E> Stream for AsyncFnTryStream<'a, T, E> {
|
||||||
|
type Item = Result<T, E>;
|
||||||
|
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||||
|
unsafe {
|
||||||
|
let self_mut = self.get_unchecked_mut();
|
||||||
|
let fut = match &mut self_mut.driver {
|
||||||
|
FnOrFut::Fut(fut) => fut,
|
||||||
|
FnOrFut::Fn(f) => {
|
||||||
|
// safety: the cell is held inline in self, which is pinned.
|
||||||
|
let cell = ptr::from_ref(&self_mut.output).as_ref().unwrap();
|
||||||
|
let fut = f.take().unwrap()(cell);
|
||||||
|
self_mut.driver = FnOrFut::Fut(fut);
|
||||||
|
return Pin::new_unchecked(self_mut).poll_next(cx);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
match fut.as_mut().poll(cx) {
|
||||||
|
Poll::Ready(Ok(_)) => Poll::Ready(None),
|
||||||
|
Poll::Ready(Err(ex)) => Poll::Ready(Some(Err(ex))),
|
||||||
|
Poll::Pending => match self_mut.output.replace(None) {
|
||||||
|
None => Poll::Pending,
|
||||||
|
Some(t) => Poll::Ready(Some(Ok(t))),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Create a stream from an async function acting as a coroutine
|
/// Create a stream from an async function acting as a coroutine
|
||||||
pub fn stream<'a, T: 'a>(
|
pub fn stream<'a, T: 'a>(
|
||||||
f: impl for<'b> AsyncFnOnce(StreamCtx<'b, T>) + 'a,
|
f: impl for<'b> AsyncFnOnce(StreamCtx<'b, T>) + 'a,
|
||||||
) -> impl Stream<Item = T> + 'a {
|
) -> impl Stream<Item = T> + 'a {
|
||||||
let (send, recv) = mpsc::channel::<T>(1);
|
AsyncFnStream {
|
||||||
let fut = async { f(StreamCtx(send, PhantomData)).await };
|
output: Cell::new(None),
|
||||||
// use options to ensure that the stream is driven to exhaustion
|
driver: FnOrFut::Fn(Some(Box::new(|t| {
|
||||||
select_with_strategy(fut.into_stream().map(|()| None), recv.map(|t| Some(t)), left_strat)
|
async { f(StreamCtx(t, PhantomData)).await }.boxed_local()
|
||||||
.filter_map(async |opt| opt)
|
}))),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a stream of result from a fallible function.
|
/// Create a stream of result from a fallible function.
|
||||||
pub fn try_stream<'a, T: 'a, E: 'a>(
|
pub fn try_stream<'a, T: 'a, E: 'a>(
|
||||||
f: impl for<'b> AsyncFnOnce(StreamCtx<'b, T>) -> Result<StreamCtx<'b, T>, E> + 'a,
|
f: impl for<'b> AsyncFnOnce(StreamCtx<'b, T>) -> Result<StreamCtx<'b, T>, E> + 'a,
|
||||||
) -> impl Stream<Item = Result<T, E>> + 'a {
|
) -> impl Stream<Item = Result<T, E>> + 'a {
|
||||||
let (send, recv) = mpsc::channel::<T>(1);
|
AsyncFnTryStream {
|
||||||
let fut = async { f(StreamCtx(send, PhantomData)).await };
|
output: Cell::new(None),
|
||||||
select_with_strategy(
|
driver: FnOrFut::Fn(Some(Box::new(|t| {
|
||||||
fut.into_stream().map(|res| if let Err(e) = res { Some(Err(e)) } else { None }),
|
async { f(StreamCtx(t, PhantomData)).await }.boxed_local()
|
||||||
recv.map(|t| Some(Ok(t))),
|
}))),
|
||||||
left_strat,
|
}
|
||||||
)
|
|
||||||
.filter_map(async |opt| opt)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -1,2 +1,2 @@
|
|||||||
let my_tuple = option::some t[1, 2]
|
let user = "dave"
|
||||||
let main = tuple::get (option::expect my_tuple "tuple is none") 1
|
let main = println "Hello $user!" exit_status::success
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ use std::future::Future;
|
|||||||
use super::coding::Coding;
|
use super::coding::Coding;
|
||||||
use crate::helpers::enc_vec;
|
use crate::helpers::enc_vec;
|
||||||
|
|
||||||
pub trait Request: fmt::Debug + Sized + 'static {
|
pub trait Request: fmt::Debug + Coding + Sized + 'static {
|
||||||
type Response: fmt::Debug + Coding + 'static;
|
type Response: fmt::Debug + Coding + 'static;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -10,7 +10,6 @@ ordered-float = "5.0.0"
|
|||||||
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
|
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
|
||||||
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
|
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
|
||||||
futures = { version = "0.3.31", features = ["std"], default-features = false }
|
futures = { version = "0.3.31", features = ["std"], default-features = false }
|
||||||
itertools = "0.14.0"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
test_executors = "0.3.5"
|
test_executors = "0.3.2"
|
||||||
|
|||||||
@@ -1,28 +1,14 @@
|
|||||||
use std::fmt;
|
|
||||||
use std::num::NonZeroU64;
|
use std::num::NonZeroU64;
|
||||||
|
|
||||||
use itertools::Itertools;
|
|
||||||
use orchid_api_derive::{Coding, Hierarchy};
|
use orchid_api_derive::{Coding, Hierarchy};
|
||||||
use orchid_api_traits::Request;
|
use orchid_api_traits::Request;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
ExprTicket, Expression, ExtHostReq, FormattingUnit, HostExtReq, OrcResult, SysId, TStrv,
|
ExprTicket, Expression, ExtHostReq, FormattingUnit, HostExtNotif, HostExtReq, OrcResult, SysId,
|
||||||
|
TVec,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Clone, Coding)]
|
pub type AtomData = Vec<u8>;
|
||||||
pub struct AtomData(pub Vec<u8>);
|
|
||||||
impl fmt::Debug for AtomData {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
let mut byte_strings = self.0.iter().map(|b| format!("{b:02x}"));
|
|
||||||
if self.0.len() < 32 {
|
|
||||||
write!(f, "AtomData({})", byte_strings.join(" "))
|
|
||||||
} else {
|
|
||||||
let data_table =
|
|
||||||
byte_strings.chunks(32).into_iter().map(|mut chunk| chunk.join(" ")).join("\n");
|
|
||||||
write!(f, "AtomData(\n{}\n)", data_table)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Unique ID associated with atoms that have an identity
|
/// Unique ID associated with atoms that have an identity
|
||||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
||||||
@@ -30,7 +16,7 @@ pub struct AtomId(pub NonZeroU64);
|
|||||||
|
|
||||||
/// An atom owned by an implied system. Usually used in responses from a system.
|
/// An atom owned by an implied system. Usually used in responses from a system.
|
||||||
/// This has the same semantics as [Atom] except in that the owner is implied.
|
/// This has the same semantics as [Atom] except in that the owner is implied.
|
||||||
#[derive(Clone, Debug, Coding)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)]
|
||||||
pub struct LocalAtom {
|
pub struct LocalAtom {
|
||||||
pub drop: Option<AtomId>,
|
pub drop: Option<AtomId>,
|
||||||
pub data: AtomData,
|
pub data: AtomData,
|
||||||
@@ -41,7 +27,7 @@ impl LocalAtom {
|
|||||||
|
|
||||||
/// An atom representation that can be serialized and sent around. Atoms
|
/// An atom representation that can be serialized and sent around. Atoms
|
||||||
/// represent the smallest increment of work.
|
/// represent the smallest increment of work.
|
||||||
#[derive(Clone, Debug, Coding)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)]
|
||||||
pub struct Atom {
|
pub struct Atom {
|
||||||
/// Instance ID of the system that created the atom
|
/// Instance ID of the system that created the atom
|
||||||
pub owner: SysId,
|
pub owner: SysId,
|
||||||
@@ -63,7 +49,7 @@ pub struct Atom {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Attempt to apply an atom as a function to an expression
|
/// Attempt to apply an atom as a function to an expression
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(AtomReq, HostExtReq)]
|
#[extends(AtomReq, HostExtReq)]
|
||||||
pub struct CallRef(pub Atom, pub ExprTicket);
|
pub struct CallRef(pub Atom, pub ExprTicket);
|
||||||
impl Request for CallRef {
|
impl Request for CallRef {
|
||||||
@@ -73,14 +59,14 @@ impl Request for CallRef {
|
|||||||
/// Attempt to apply an atom as a function, consuming the atom and enabling the
|
/// Attempt to apply an atom as a function, consuming the atom and enabling the
|
||||||
/// library to reuse its datastructures rather than duplicating them. This is an
|
/// library to reuse its datastructures rather than duplicating them. This is an
|
||||||
/// optimization over [CallRef] followed by [AtomDrop].
|
/// optimization over [CallRef] followed by [AtomDrop].
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(AtomReq, HostExtReq)]
|
#[extends(AtomReq, HostExtReq)]
|
||||||
pub struct FinalCall(pub Atom, pub ExprTicket);
|
pub struct FinalCall(pub Atom, pub ExprTicket);
|
||||||
impl Request for FinalCall {
|
impl Request for FinalCall {
|
||||||
type Response = Expression;
|
type Response = Expression;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(AtomReq, HostExtReq)]
|
#[extends(AtomReq, HostExtReq)]
|
||||||
pub struct SerializeAtom(pub Atom);
|
pub struct SerializeAtom(pub Atom);
|
||||||
impl Request for SerializeAtom {
|
impl Request for SerializeAtom {
|
||||||
@@ -95,16 +81,16 @@ impl Request for DeserAtom {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// A request blindly routed to the system that provides an atom.
|
/// A request blindly routed to the system that provides an atom.
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(AtomReq, HostExtReq)]
|
#[extends(AtomReq, HostExtReq)]
|
||||||
pub struct Fwded(pub Atom, pub TStrv, pub Vec<u8>);
|
pub struct Fwded(pub Atom, pub TVec, pub Vec<u8>);
|
||||||
impl Request for Fwded {
|
impl Request for Fwded {
|
||||||
type Response = Option<Vec<u8>>;
|
type Response = Option<Vec<u8>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(ExtHostReq)]
|
#[extends(ExtHostReq)]
|
||||||
pub struct Fwd(pub Atom, pub TStrv, pub Vec<u8>);
|
pub struct Fwd(pub Atom, pub TVec, pub Vec<u8>);
|
||||||
impl Request for Fwd {
|
impl Request for Fwd {
|
||||||
type Response = Option<Vec<u8>>;
|
type Response = Option<Vec<u8>>;
|
||||||
}
|
}
|
||||||
@@ -114,7 +100,7 @@ pub enum NextStep {
|
|||||||
Continue(Expression),
|
Continue(Expression),
|
||||||
Halt,
|
Halt,
|
||||||
}
|
}
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(AtomReq, HostExtReq)]
|
#[extends(AtomReq, HostExtReq)]
|
||||||
pub struct Command(pub Atom);
|
pub struct Command(pub Atom);
|
||||||
impl Request for Command {
|
impl Request for Command {
|
||||||
@@ -125,20 +111,17 @@ impl Request for Command {
|
|||||||
/// isn't referenced anywhere. This should have no effect if the atom's `drop`
|
/// isn't referenced anywhere. This should have no effect if the atom's `drop`
|
||||||
/// flag is false.
|
/// flag is false.
|
||||||
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(HostExtReq)]
|
#[extends(HostExtNotif)]
|
||||||
pub struct AtomDrop(pub SysId, pub AtomId);
|
pub struct AtomDrop(pub SysId, pub AtomId);
|
||||||
impl Request for AtomDrop {
|
|
||||||
type Response = ();
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(AtomReq, HostExtReq)]
|
#[extends(AtomReq, HostExtReq)]
|
||||||
pub struct AtomPrint(pub Atom);
|
pub struct AtomPrint(pub Atom);
|
||||||
impl Request for AtomPrint {
|
impl Request for AtomPrint {
|
||||||
type Response = FormattingUnit;
|
type Response = FormattingUnit;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(ExtHostReq)]
|
#[extends(ExtHostReq)]
|
||||||
pub struct ExtAtomPrint(pub Atom);
|
pub struct ExtAtomPrint(pub Atom);
|
||||||
impl Request for ExtAtomPrint {
|
impl Request for ExtAtomPrint {
|
||||||
@@ -146,7 +129,7 @@ impl Request for ExtAtomPrint {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Requests that apply to an existing atom instance
|
/// Requests that apply to an existing atom instance
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(HostExtReq)]
|
#[extends(HostExtReq)]
|
||||||
#[extendable]
|
#[extendable]
|
||||||
pub enum AtomReq {
|
pub enum AtomReq {
|
||||||
|
|||||||
@@ -1,23 +1,17 @@
|
|||||||
use std::fmt;
|
|
||||||
use std::num::NonZeroU64;
|
use std::num::NonZeroU64;
|
||||||
|
|
||||||
use orchid_api_derive::{Coding, Hierarchy};
|
use orchid_api_derive::{Coding, Hierarchy};
|
||||||
use orchid_api_traits::Request;
|
use orchid_api_traits::Request;
|
||||||
|
|
||||||
use crate::{Atom, ExtHostNotif, ExtHostReq, Location, OrcError, SysId, TStrv};
|
use crate::{Atom, ExtHostNotif, ExtHostReq, Location, OrcError, SysId, TVec};
|
||||||
|
|
||||||
/// An arbitrary ID associated with an expression on the host side. Incoming
|
/// An arbitrary ID associated with an expression on the host side. Incoming
|
||||||
/// tickets always come with some lifetime guarantee, which can be extended with
|
/// tickets always come with some lifetime guarantee, which can be extended with
|
||||||
/// [Acquire].
|
/// [Acquire].
|
||||||
///
|
///
|
||||||
/// The ID is globally unique within its lifetime, but may be reused.
|
/// The ID is globally unique within its lifetime, but may be reused.
|
||||||
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
||||||
pub struct ExprTicket(pub NonZeroU64);
|
pub struct ExprTicket(pub NonZeroU64);
|
||||||
impl fmt::Debug for ExprTicket {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "ExprTicket({:x})", self.0.get())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Acquire a strong reference to an expression. This keeps it alive until a
|
/// Acquire a strong reference to an expression. This keeps it alive until a
|
||||||
/// corresponding [Release] is emitted. The number of times a system has
|
/// corresponding [Release] is emitted. The number of times a system has
|
||||||
@@ -43,6 +37,17 @@ pub struct Acquire(pub SysId, pub ExprTicket);
|
|||||||
#[extends(ExprNotif, ExtHostNotif)]
|
#[extends(ExprNotif, ExtHostNotif)]
|
||||||
pub struct Release(pub SysId, pub ExprTicket);
|
pub struct Release(pub SysId, pub ExprTicket);
|
||||||
|
|
||||||
|
/// Decrement the reference count for one system and increment it for another,
|
||||||
|
/// to indicate passing an owned reference. Equivalent to [Acquire] followed by
|
||||||
|
/// [Release].
|
||||||
|
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
|
#[extends(ExprNotif, ExtHostNotif)]
|
||||||
|
pub struct Move {
|
||||||
|
pub dec: SysId,
|
||||||
|
pub inc: SysId,
|
||||||
|
pub expr: ExprTicket,
|
||||||
|
}
|
||||||
|
|
||||||
/// A description of a new expression. It is used as the return value of
|
/// A description of a new expression. It is used as the return value of
|
||||||
/// [crate::atom::Call] or [crate::atom::CallRef], or a constant in the
|
/// [crate::atom::Call] or [crate::atom::CallRef], or a constant in the
|
||||||
/// [crate::tree::Tree].
|
/// [crate::tree::Tree].
|
||||||
@@ -56,8 +61,7 @@ pub enum ExpressionKind {
|
|||||||
/// template
|
/// template
|
||||||
Arg(u64),
|
Arg(u64),
|
||||||
/// Insert the specified host-expression in the template here. When the clause
|
/// Insert the specified host-expression in the template here. When the clause
|
||||||
/// is used in the const tree, this variant is forbidden. The ticket held
|
/// is used in the const tree, this variant is forbidden.
|
||||||
/// within is always owning. To avoid a leak, it must be deserialized.
|
|
||||||
Slot(ExprTicket),
|
Slot(ExprTicket),
|
||||||
/// The lhs must be fully processed before the rhs can be processed.
|
/// The lhs must be fully processed before the rhs can be processed.
|
||||||
/// Equivalent to Haskell's function of the same name
|
/// Equivalent to Haskell's function of the same name
|
||||||
@@ -68,7 +72,7 @@ pub enum ExpressionKind {
|
|||||||
/// Because the atom is newly constructed, it also must belong to this system.
|
/// Because the atom is newly constructed, it also must belong to this system.
|
||||||
NewAtom(Atom),
|
NewAtom(Atom),
|
||||||
/// A reference to a constant
|
/// A reference to a constant
|
||||||
Const(TStrv),
|
Const(TVec),
|
||||||
/// A static runtime error.
|
/// A static runtime error.
|
||||||
Bottom(Vec<OrcError>),
|
Bottom(Vec<OrcError>),
|
||||||
}
|
}
|
||||||
@@ -105,12 +109,11 @@ impl Request for Inspect {
|
|||||||
type Response = Inspected;
|
type Response = Inspected;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
#[extends(ExtHostReq)]
|
#[extends(ExtHostReq)]
|
||||||
#[extendable]
|
#[extendable]
|
||||||
pub enum ExprReq {
|
pub enum ExprReq {
|
||||||
Inspect(Inspect),
|
Inspect(Inspect),
|
||||||
Create(Create),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
|
||||||
@@ -119,11 +122,5 @@ pub enum ExprReq {
|
|||||||
pub enum ExprNotif {
|
pub enum ExprNotif {
|
||||||
Acquire(Acquire),
|
Acquire(Acquire),
|
||||||
Release(Release),
|
Release(Release),
|
||||||
}
|
Move(Move),
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
|
||||||
#[extends(ExprReq, ExtHostReq)]
|
|
||||||
pub struct Create(pub Expression);
|
|
||||||
impl Request for Create {
|
|
||||||
type Response = ExprTicket;
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -47,7 +47,7 @@ impl Request for ExternStr {
|
|||||||
#[extends(IntReq, ExtHostReq)]
|
#[extends(IntReq, ExtHostReq)]
|
||||||
pub struct InternStrv(pub Vec<TStr>);
|
pub struct InternStrv(pub Vec<TStr>);
|
||||||
impl Request for InternStrv {
|
impl Request for InternStrv {
|
||||||
type Response = TStrv;
|
type Response = TVec;
|
||||||
}
|
}
|
||||||
/// replica -> master to find the vector of interned strings corresponding to a
|
/// replica -> master to find the vector of interned strings corresponding to a
|
||||||
/// token
|
/// token
|
||||||
@@ -57,7 +57,7 @@ impl Request for InternStrv {
|
|||||||
/// See [IntReq]
|
/// See [IntReq]
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Coding, Hierarchy)]
|
||||||
#[extends(IntReq, ExtHostReq)]
|
#[extends(IntReq, ExtHostReq)]
|
||||||
pub struct ExternStrv(pub TStrv);
|
pub struct ExternStrv(pub TVec);
|
||||||
impl Request for ExternStrv {
|
impl Request for ExternStrv {
|
||||||
type Response = Vec<TStr>;
|
type Response = Vec<TStr>;
|
||||||
}
|
}
|
||||||
@@ -68,7 +68,7 @@ pub struct TStr(pub NonZeroU64);
|
|||||||
|
|
||||||
/// A substitute for an interned string sequence in serialized datastructures.
|
/// A substitute for an interned string sequence in serialized datastructures.
|
||||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
||||||
pub struct TStrv(pub NonZeroU64);
|
pub struct TVec(pub NonZeroU64);
|
||||||
|
|
||||||
/// A request to sweep the replica. The master will not be sweeped until all
|
/// A request to sweep the replica. The master will not be sweeped until all
|
||||||
/// replicas respond, as it must retain everything the replicas retained
|
/// replicas respond, as it must retain everything the replicas retained
|
||||||
@@ -84,5 +84,5 @@ impl Request for Sweep {
|
|||||||
#[derive(Clone, Debug, Coding)]
|
#[derive(Clone, Debug, Coding)]
|
||||||
pub struct Retained {
|
pub struct Retained {
|
||||||
pub strings: Vec<TStr>,
|
pub strings: Vec<TStr>,
|
||||||
pub vecs: Vec<TStrv>,
|
pub vecs: Vec<TVec>,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ use std::ops::RangeInclusive;
|
|||||||
use orchid_api_derive::{Coding, Hierarchy};
|
use orchid_api_derive::{Coding, Hierarchy};
|
||||||
use orchid_api_traits::Request;
|
use orchid_api_traits::Request;
|
||||||
|
|
||||||
use crate::{ExtHostReq, HostExtReq, OrcResult, ParsId, SysId, TStr, TStrv, TokenTree};
|
use crate::{ExtHostReq, HostExtReq, OrcResult, ParsId, SysId, TStr, TVec, TokenTree};
|
||||||
|
|
||||||
/// - All ranges contain at least one character
|
/// - All ranges contain at least one character
|
||||||
/// - All ranges are in increasing characeter order
|
/// - All ranges are in increasing characeter order
|
||||||
@@ -19,7 +19,7 @@ pub struct LexExpr {
|
|||||||
pub text: TStr,
|
pub text: TStr,
|
||||||
pub pos: u32,
|
pub pos: u32,
|
||||||
/// Source root module path
|
/// Source root module path
|
||||||
pub src: TStrv,
|
pub src: TVec,
|
||||||
}
|
}
|
||||||
impl Request for LexExpr {
|
impl Request for LexExpr {
|
||||||
type Response = Option<OrcResult<LexedExpr>>;
|
type Response = Option<OrcResult<LexedExpr>>;
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ use std::ops::Range;
|
|||||||
|
|
||||||
use orchid_api_derive::Coding;
|
use orchid_api_derive::Coding;
|
||||||
|
|
||||||
use crate::{TStr, TStrv};
|
use crate::{TStr, TVec};
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding)]
|
#[derive(Clone, Debug, Coding)]
|
||||||
pub enum Location {
|
pub enum Location {
|
||||||
@@ -17,18 +17,16 @@ pub enum Location {
|
|||||||
Gen(CodeGenInfo),
|
Gen(CodeGenInfo),
|
||||||
/// Range and file
|
/// Range and file
|
||||||
SourceRange(SourceRange),
|
SourceRange(SourceRange),
|
||||||
/// Multiple locations
|
|
||||||
Multi(Vec<Location>),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding)]
|
#[derive(Clone, Debug, Coding)]
|
||||||
pub struct SourceRange {
|
pub struct SourceRange {
|
||||||
pub path: TStrv,
|
pub path: TVec,
|
||||||
pub range: Range<u32>,
|
pub range: Range<u32>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding)]
|
#[derive(Clone, Debug, Coding)]
|
||||||
pub struct CodeGenInfo {
|
pub struct CodeGenInfo {
|
||||||
pub generator: TStrv,
|
pub generator: TVec,
|
||||||
pub details: TStr,
|
pub details: TStr,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ use orchid_api_derive::{Coding, Hierarchy};
|
|||||||
use orchid_api_traits::Request;
|
use orchid_api_traits::Request;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Expression, ExtHostReq, HostExtReq, OrcResult, SourceRange, SysId, TStr, TStrv, TokenTree,
|
Expression, ExtHostReq, HostExtReq, OrcResult, SourceRange, SysId, TStr, TVec, TokenTree,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
||||||
@@ -24,10 +24,10 @@ pub struct ParsedConstId(pub NonZeroU64);
|
|||||||
pub struct ParseLine {
|
pub struct ParseLine {
|
||||||
pub sys: SysId,
|
pub sys: SysId,
|
||||||
/// The immediately enclosing module path
|
/// The immediately enclosing module path
|
||||||
pub module: TStrv,
|
pub module: TVec,
|
||||||
/// The root module path for the snipppet of source code, prefix of
|
/// The root module path for the snipppet of source code, prefix of
|
||||||
/// [ParseLine#module]
|
/// [ParseLine#module]
|
||||||
pub src: TStrv,
|
pub src: TVec,
|
||||||
pub comments: Vec<Comment>,
|
pub comments: Vec<Comment>,
|
||||||
pub exported: bool,
|
pub exported: bool,
|
||||||
pub idx: u16,
|
pub idx: u16,
|
||||||
@@ -68,7 +68,10 @@ pub enum ParsedMemberKind {
|
|||||||
/// the macro engine could run here.
|
/// the macro engine could run here.
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Coding, Hierarchy)]
|
||||||
#[extends(HostExtReq)]
|
#[extends(HostExtReq)]
|
||||||
pub struct FetchParsedConst(pub SysId, pub ParsedConstId);
|
pub struct FetchParsedConst {
|
||||||
|
pub sys: SysId,
|
||||||
|
pub id: ParsedConstId,
|
||||||
|
}
|
||||||
impl Request for FetchParsedConst {
|
impl Request for FetchParsedConst {
|
||||||
type Response = Expression;
|
type Response = Expression;
|
||||||
}
|
}
|
||||||
@@ -94,9 +97,9 @@ pub struct Comment {
|
|||||||
pub struct ResolveNames {
|
pub struct ResolveNames {
|
||||||
pub sys: SysId,
|
pub sys: SysId,
|
||||||
pub constid: ParsedConstId,
|
pub constid: ParsedConstId,
|
||||||
pub names: Vec<TStrv>,
|
pub names: Vec<TVec>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Request for ResolveNames {
|
impl Request for ResolveNames {
|
||||||
type Response = Vec<OrcResult<TStrv>>;
|
type Response = Vec<OrcResult<TVec>>;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -120,14 +120,14 @@ pub enum HostExtReq {
|
|||||||
ParseLine(parser::ParseLine),
|
ParseLine(parser::ParseLine),
|
||||||
FetchParsedConst(parser::FetchParsedConst),
|
FetchParsedConst(parser::FetchParsedConst),
|
||||||
GetMember(tree::GetMember),
|
GetMember(tree::GetMember),
|
||||||
SystemDrop(system::SystemDrop),
|
|
||||||
AtomDrop(atom::AtomDrop),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Notifications sent from the host to the extension
|
/// Notifications sent from the host to the extension
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Coding, Hierarchy)]
|
||||||
#[extendable]
|
#[extendable]
|
||||||
pub enum HostExtNotif {
|
pub enum HostExtNotif {
|
||||||
|
SystemDrop(system::SystemDrop),
|
||||||
|
AtomDrop(atom::AtomDrop),
|
||||||
/// The host can assume that after this notif is sent, a correctly written
|
/// The host can assume that after this notif is sent, a correctly written
|
||||||
/// extension will eventually exit.
|
/// extension will eventually exit.
|
||||||
Exit,
|
Exit,
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ use orchid_api_derive::{Coding, Hierarchy};
|
|||||||
use orchid_api_traits::Request;
|
use orchid_api_traits::Request;
|
||||||
use ordered_float::NotNan;
|
use ordered_float::NotNan;
|
||||||
|
|
||||||
use crate::{CharFilter, ExtHostReq, HostExtReq, MemberKind, TStr, TStrv};
|
use crate::{CharFilter, ExtHostReq, HostExtNotif, HostExtReq, MemberKind, TStr, TVec};
|
||||||
|
|
||||||
/// ID of a system type
|
/// ID of a system type
|
||||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
||||||
@@ -63,15 +63,12 @@ pub struct NewSystemResponse {
|
|||||||
pub lex_filter: CharFilter,
|
pub lex_filter: CharFilter,
|
||||||
pub line_types: Vec<TStr>,
|
pub line_types: Vec<TStr>,
|
||||||
pub const_root: HashMap<TStr, MemberKind>,
|
pub const_root: HashMap<TStr, MemberKind>,
|
||||||
pub prelude: Vec<TStrv>,
|
pub prelude: Vec<TVec>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Coding, Hierarchy)]
|
||||||
#[extends(HostExtReq)]
|
#[extends(HostExtNotif)]
|
||||||
pub struct SystemDrop(pub SysId);
|
pub struct SystemDrop(pub SysId);
|
||||||
impl Request for SystemDrop {
|
|
||||||
type Response = ();
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Debug, Coding, Hierarchy)]
|
||||||
#[extends(SysReq, HostExtReq)]
|
#[extends(SysReq, HostExtReq)]
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::fmt;
|
|
||||||
use std::num::NonZeroU64;
|
use std::num::NonZeroU64;
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
@@ -7,7 +6,7 @@ use std::rc::Rc;
|
|||||||
use orchid_api_derive::{Coding, Hierarchy};
|
use orchid_api_derive::{Coding, Hierarchy};
|
||||||
use orchid_api_traits::Request;
|
use orchid_api_traits::Request;
|
||||||
|
|
||||||
use crate::{ExprTicket, Expression, ExtHostReq, HostExtReq, OrcError, SysId, TStr, TStrv};
|
use crate::{ExprTicket, Expression, ExtHostReq, HostExtReq, OrcError, SysId, TStr, TVec};
|
||||||
|
|
||||||
/// A token tree from a lexer recursion request. Its lifetime is the lex call,
|
/// A token tree from a lexer recursion request. Its lifetime is the lex call,
|
||||||
/// the lexer can include it in its output or discard it by implication.
|
/// the lexer can include it in its output or discard it by implication.
|
||||||
@@ -57,15 +56,6 @@ pub enum Paren {
|
|||||||
Square,
|
Square,
|
||||||
Curly,
|
Curly,
|
||||||
}
|
}
|
||||||
impl fmt::Display for Paren {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}", match self {
|
|
||||||
Self::Round => "()",
|
|
||||||
Self::Curly => "{}",
|
|
||||||
Self::Square => "[]",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
||||||
pub struct TreeId(pub NonZeroU64);
|
pub struct TreeId(pub NonZeroU64);
|
||||||
@@ -102,7 +92,7 @@ impl Request for GetMember {
|
|||||||
/// an atom call.
|
/// an atom call.
|
||||||
#[derive(Clone, Copy, Debug, Coding, Hierarchy)]
|
#[derive(Clone, Copy, Debug, Coding, Hierarchy)]
|
||||||
#[extends(ExtHostReq)]
|
#[extends(ExtHostReq)]
|
||||||
pub struct LsModule(pub SysId, pub TStrv);
|
pub struct LsModule(pub SysId, pub TVec);
|
||||||
impl Request for LsModule {
|
impl Request for LsModule {
|
||||||
type Response = Result<ModuleInfo, LsModuleError>;
|
type Response = Result<ModuleInfo, LsModuleError>;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,9 @@ name = "orchid-base"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2024"
|
edition = "2024"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
mocks = []
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
|||||||
28
orchid-base/src/ctx.rs
Normal file
28
orchid-base/src/ctx.rs
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
use std::any::{TypeId, type_name};
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
use std::rc::Rc;
|
||||||
|
|
||||||
|
use orchid_api_traits::MsgSet;
|
||||||
|
|
||||||
|
use crate::error::Reporter;
|
||||||
|
use crate::interner::Interner;
|
||||||
|
use crate::reqnot::{Client, DynClient};
|
||||||
|
|
||||||
|
pub trait CtxDyn {
|
||||||
|
fn i(&self) -> Interner;
|
||||||
|
fn rep(&self) -> &Reporter;
|
||||||
|
fn client(&self, msg_set: TypeId) -> Option<Rc<dyn DynClient>>;
|
||||||
|
fn msg_set_type(&self) -> TypeId;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Ctx(Rc<dyn CtxDyn>);
|
||||||
|
impl Ctx {
|
||||||
|
pub fn i(&self) -> Interner { self.0.i() }
|
||||||
|
pub fn rep(&self) -> &Reporter { self.0.rep() }
|
||||||
|
pub fn client<T: MsgSet>(&self) -> Client<T> {
|
||||||
|
let Some(dyn_client) = self.0.client(TypeId::of::<T>()) else {
|
||||||
|
panic!("Incorrect message set {} passed", type_name::<T>());
|
||||||
|
};
|
||||||
|
Client(dyn_client, PhantomData)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,14 +1,22 @@
|
|||||||
use std::cell::RefCell;
|
use std::cell::RefCell;
|
||||||
use std::ffi::OsStr;
|
use std::ffi::OsStr;
|
||||||
use std::fmt;
|
use std::fmt::{self, Display};
|
||||||
use std::ops::Add;
|
use std::ops::{Add, AddAssign, Deref};
|
||||||
|
use std::pin::Pin;
|
||||||
|
use std::rc::Rc;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use futures::future::join_all;
|
use async_fn_stream::stream;
|
||||||
|
use async_once_cell::{Lazy, OnceCell};
|
||||||
|
use futures::future::{join_all, ready};
|
||||||
|
use futures::lock::Mutex;
|
||||||
|
use futures::{Stream, StreamExt, stream};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::interner::{Interner, Tok};
|
use crate::ctx::Ctx;
|
||||||
|
use crate::format::{FmtCtx, FmtUnit, Format};
|
||||||
|
use crate::interner::{IStr, Interner};
|
||||||
use crate::location::Pos;
|
use crate::location::Pos;
|
||||||
|
|
||||||
/// A point of interest in resolving the error, such as the point where
|
/// A point of interest in resolving the error, such as the point where
|
||||||
@@ -50,40 +58,126 @@ impl fmt::Display for ErrPos {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct OrcErr {
|
struct SingleError {
|
||||||
pub description: Tok<String>,
|
pub description: IStr,
|
||||||
pub message: Arc<String>,
|
pub message: Arc<String>,
|
||||||
pub positions: Vec<ErrPos>,
|
pub positions: Vec<ErrPos>,
|
||||||
}
|
}
|
||||||
impl OrcErr {
|
impl fmt::Display for SingleError {
|
||||||
fn to_api(&self) -> api::OrcError {
|
|
||||||
api::OrcError {
|
|
||||||
description: self.description.to_api(),
|
|
||||||
message: self.message.clone(),
|
|
||||||
locations: self.positions.iter().map(ErrPos::to_api).collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
async fn from_api(api: &api::OrcError, i: &Interner) -> Self {
|
|
||||||
Self {
|
|
||||||
description: Tok::from_api(api.description, i).await,
|
|
||||||
message: api.message.clone(),
|
|
||||||
positions: join_all(api.locations.iter().map(|e| ErrPos::from_api(e, i))).await,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl PartialEq<Tok<String>> for OrcErr {
|
|
||||||
fn eq(&self, other: &Tok<String>) -> bool { self.description == *other }
|
|
||||||
}
|
|
||||||
impl From<OrcErr> for Vec<OrcErr> {
|
|
||||||
fn from(value: OrcErr) -> Self { vec![value] }
|
|
||||||
}
|
|
||||||
impl fmt::Display for OrcErr {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
let pstr = self.positions.iter().map(|p| format!("{p}")).join("; ");
|
let pstr = self.positions.iter().map(|p| format!("{p}")).join("; ");
|
||||||
write!(f, "{}: {} @ {}", self.description, self.message, pstr)
|
write!(f, "{}: {} @ {}", self.description, self.message, pstr)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct OrcErr {
|
||||||
|
singles: OnceCell<OwnedOrcErr>,
|
||||||
|
futures: Mutex<Vec<Pin<Box<dyn Future<Output = OwnedOrcErr>>>>>,
|
||||||
|
}
|
||||||
|
impl OrcErr {
|
||||||
|
pub async fn into_owned(self) -> OwnedOrcErr {
|
||||||
|
self.to_owned().await;
|
||||||
|
self.singles.into_inner().expect("Initialized above")
|
||||||
|
}
|
||||||
|
pub async fn to_owned(&self) -> &OwnedOrcErr {
|
||||||
|
self
|
||||||
|
.singles
|
||||||
|
.get_or_init(async {
|
||||||
|
let results = join_all(self.futures.lock().await.drain(..)).await;
|
||||||
|
OwnedOrcErr(results.iter().flat_map(|err| err.0.iter()).cloned().collect())
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
fn into_futures(self) -> Vec<Pin<Box<dyn Future<Output = OwnedOrcErr>>>> {
|
||||||
|
match self.singles.into_inner() {
|
||||||
|
Some(val) => vec![Box::pin(ready(val))],
|
||||||
|
None => self.futures.into_inner(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<OwnedOrcErr> for OrcErr {
|
||||||
|
fn from(value: OwnedOrcErr) -> Self {
|
||||||
|
Self { singles: OnceCell::from(value), futures: Mutex::new(vec![]) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<T: Future<Output = OrcErr> + 'static> From<T> for OrcErr {
|
||||||
|
fn from(value: T) -> Self {
|
||||||
|
Self {
|
||||||
|
singles: OnceCell::new(),
|
||||||
|
futures: Mutex::new(vec![Box::pin(async { value.await.into_owned().await })]),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Add for OrcErr {
|
||||||
|
type Output = Self;
|
||||||
|
fn add(mut self, mut rhs: Self) -> Self::Output {
|
||||||
|
if let (Some(l), Some(r)) = (self.singles.get_mut(), rhs.singles.get_mut()) {
|
||||||
|
l.0.extend(r.0.drain(..));
|
||||||
|
return self;
|
||||||
|
}
|
||||||
|
Self {
|
||||||
|
singles: OnceCell::new(),
|
||||||
|
futures: Mutex::new(self.into_futures().into_iter().chain(rhs.into_futures()).collect()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl AddAssign for OrcErr {
|
||||||
|
fn add_assign(&mut self, mut rhs: Self) {
|
||||||
|
if let (Some(l), Some(r)) = (self.singles.get_mut(), rhs.singles.get_mut()) {
|
||||||
|
l.0.extend(r.0.drain(..));
|
||||||
|
} else {
|
||||||
|
let mut temp = Self { futures: Mutex::default(), singles: OnceCell::new() };
|
||||||
|
std::mem::swap(self, &mut temp);
|
||||||
|
self.futures.get_mut().extend(temp.into_futures().into_iter().chain(rhs.into_futures()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Format for OrcErr {
|
||||||
|
async fn print<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
|
format!("{}", self.to_owned().await).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OwnedOrcErr(Vec<SingleError>);
|
||||||
|
impl OwnedOrcErr {
|
||||||
|
pub fn to_api(&self) -> Vec<api::OrcError> {
|
||||||
|
self
|
||||||
|
.0
|
||||||
|
.iter()
|
||||||
|
.map(|err| api::OrcError {
|
||||||
|
description: err.description.to_api(),
|
||||||
|
message: err.message.clone(),
|
||||||
|
locations: err.positions.iter().map(|pos| pos.to_api()).collect(),
|
||||||
|
})
|
||||||
|
.collect_vec()
|
||||||
|
}
|
||||||
|
pub async fn from_api(api: impl IntoIterator<Item = &api::OrcError>, i: &Interner) -> Self {
|
||||||
|
Self(
|
||||||
|
join_all(api.into_iter().map(|e| async {
|
||||||
|
SingleError {
|
||||||
|
description: i.es(e.description).await,
|
||||||
|
message: e.message.clone(),
|
||||||
|
positions: join_all(e.locations.iter().map(|pos| ErrPos::from_api(pos, i))).await,
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
.await,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Display for OwnedOrcErr {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(f, "{}", self.0.iter().join("\n"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl fmt::Debug for OwnedOrcErr {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "OwnedOrcErr({self}") }
|
||||||
|
}
|
||||||
|
impl Add for OwnedOrcErr {
|
||||||
|
type Output = Self;
|
||||||
|
fn add(self, rhs: Self) -> Self::Output { Self(self.0.into_iter().chain(rhs.0).collect()) }
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct EmptyErrv;
|
pub struct EmptyErrv;
|
||||||
impl fmt::Display for EmptyErrv {
|
impl fmt::Display for EmptyErrv {
|
||||||
@@ -92,70 +186,7 @@ impl fmt::Display for EmptyErrv {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
pub type OrcRes<T> = Result<T, OrcErr>;
|
||||||
pub struct OrcErrv(Vec<OrcErr>);
|
|
||||||
impl OrcErrv {
|
|
||||||
pub fn new(errors: impl IntoIterator<Item = OrcErr>) -> Result<Self, EmptyErrv> {
|
|
||||||
let v = errors.into_iter().collect_vec();
|
|
||||||
if v.is_empty() { Err(EmptyErrv) } else { Ok(Self(v)) }
|
|
||||||
}
|
|
||||||
#[must_use]
|
|
||||||
pub fn extended<T>(mut self, errors: impl IntoIterator<Item = T>) -> Self
|
|
||||||
where Self: Extend<T> {
|
|
||||||
self.extend(errors);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
#[must_use]
|
|
||||||
pub fn len(&self) -> usize { self.0.len() }
|
|
||||||
#[must_use]
|
|
||||||
pub fn is_empty(&self) -> bool { self.len() == 0 }
|
|
||||||
#[must_use]
|
|
||||||
pub fn any(&self, f: impl FnMut(&OrcErr) -> bool) -> bool { self.0.iter().any(f) }
|
|
||||||
#[must_use]
|
|
||||||
pub fn keep_only(self, f: impl FnMut(&OrcErr) -> bool) -> Option<Self> {
|
|
||||||
let v = self.0.into_iter().filter(f).collect_vec();
|
|
||||||
if v.is_empty() { None } else { Some(Self(v)) }
|
|
||||||
}
|
|
||||||
#[must_use]
|
|
||||||
pub fn one(&self) -> Option<&OrcErr> { (self.0.len() == 1).then(|| &self.0[9]) }
|
|
||||||
pub fn pos_iter(&self) -> impl Iterator<Item = ErrPos> + '_ {
|
|
||||||
self.0.iter().flat_map(|e| e.positions.iter().cloned())
|
|
||||||
}
|
|
||||||
pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect() }
|
|
||||||
pub async fn from_api<'a>(
|
|
||||||
api: impl IntoIterator<Item = &'a api::OrcError>,
|
|
||||||
i: &Interner,
|
|
||||||
) -> Self {
|
|
||||||
Self(join_all(api.into_iter().map(|e| OrcErr::from_api(e, i))).await)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl From<OrcErr> for OrcErrv {
|
|
||||||
fn from(value: OrcErr) -> Self { Self(vec![value]) }
|
|
||||||
}
|
|
||||||
impl Add for OrcErrv {
|
|
||||||
type Output = Self;
|
|
||||||
fn add(self, rhs: Self) -> Self::Output { Self(self.0.into_iter().chain(rhs.0).collect_vec()) }
|
|
||||||
}
|
|
||||||
impl Extend<OrcErr> for OrcErrv {
|
|
||||||
fn extend<T: IntoIterator<Item = OrcErr>>(&mut self, iter: T) { self.0.extend(iter) }
|
|
||||||
}
|
|
||||||
impl Extend<OrcErrv> for OrcErrv {
|
|
||||||
fn extend<T: IntoIterator<Item = OrcErrv>>(&mut self, iter: T) {
|
|
||||||
self.0.extend(iter.into_iter().flatten())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl IntoIterator for OrcErrv {
|
|
||||||
type IntoIter = std::vec::IntoIter<OrcErr>;
|
|
||||||
type Item = OrcErr;
|
|
||||||
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
|
|
||||||
}
|
|
||||||
impl fmt::Display for OrcErrv {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}", self.0.iter().join("\n"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub type OrcRes<T> = Result<T, OrcErrv>;
|
|
||||||
|
|
||||||
pub fn join_ok<T, U>(left: OrcRes<T>, right: OrcRes<U>) -> OrcRes<(T, U)> {
|
pub fn join_ok<T, U>(left: OrcRes<T>, right: OrcRes<U>) -> OrcRes<(T, U)> {
|
||||||
match (left, right) {
|
match (left, right) {
|
||||||
@@ -191,62 +222,80 @@ macro_rules! join_ok {
|
|||||||
(@VALUES) => { Ok(()) };
|
(@VALUES) => { Ok(()) };
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn mk_errv_floating(description: Tok<String>, message: impl AsRef<str>) -> OrcErrv {
|
impl Ctx {
|
||||||
mk_errv::<Pos>(description, message, [])
|
pub fn mk_err_floating(
|
||||||
|
&self,
|
||||||
|
description: impl AsRef<str> + 'static,
|
||||||
|
message: impl AsRef<str> + 'static,
|
||||||
|
) -> OrcErr {
|
||||||
|
self.mk_err::<Pos>(description, message, [])
|
||||||
}
|
}
|
||||||
|
pub fn mk_err<I: Into<ErrPos>>(
|
||||||
pub fn mk_errv<I: Into<ErrPos>>(
|
&self,
|
||||||
description: Tok<String>,
|
description: impl AsRef<str> + 'static,
|
||||||
message: impl AsRef<str>,
|
message: impl AsRef<str> + 'static,
|
||||||
posv: impl IntoIterator<Item = I>,
|
posv: impl IntoIterator<Item = I> + 'static,
|
||||||
) -> OrcErrv {
|
) -> OrcErr {
|
||||||
OrcErr {
|
let i = self.i();
|
||||||
description,
|
async move {
|
||||||
|
OwnedOrcErr(vec![SingleError {
|
||||||
|
description: i.is(description.as_ref()).await,
|
||||||
message: Arc::new(message.as_ref().to_string()),
|
message: Arc::new(message.as_ref().to_string()),
|
||||||
positions: posv.into_iter().map_into().collect(),
|
positions: posv.into_iter().map_into().collect(),
|
||||||
|
}])
|
||||||
|
.into()
|
||||||
}
|
}
|
||||||
.into()
|
.into()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn async_io_err<I: Into<ErrPos>>(
|
pub async fn async_io_err<I: Into<ErrPos>>(
|
||||||
|
&self,
|
||||||
err: std::io::Error,
|
err: std::io::Error,
|
||||||
i: &Interner,
|
posv: impl IntoIterator<Item = I> + 'static,
|
||||||
posv: impl IntoIterator<Item = I>,
|
) -> OrcErr {
|
||||||
) -> OrcErrv {
|
self.mk_err(err.kind().to_string(), err.to_string(), posv)
|
||||||
mk_errv(i.i(&err.kind().to_string()).await, err.to_string(), posv)
|
|
||||||
}
|
}
|
||||||
|
pub fn os_str_to_string<'a, I: Into<ErrPos>>(
|
||||||
pub async fn os_str_to_string<'a, I: Into<ErrPos>>(
|
&self,
|
||||||
str: &'a OsStr,
|
str: &'a OsStr,
|
||||||
i: &Interner,
|
posv: impl IntoIterator<Item = I> + 'static,
|
||||||
posv: impl IntoIterator<Item = I>,
|
|
||||||
) -> OrcRes<&'a str> {
|
) -> OrcRes<&'a str> {
|
||||||
match str.to_str() {
|
match str.to_str() {
|
||||||
Some(str) => Ok(str),
|
Some(str) => Ok(str),
|
||||||
None => Err(mk_errv(
|
None => Err(self.mk_err(
|
||||||
i.i("Non-unicode string").await,
|
"Non-unicode string",
|
||||||
format!("{str:?} is not representable as unicode"),
|
format!("{str:?} is not representable as unicode"),
|
||||||
posv,
|
posv,
|
||||||
)),
|
)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub struct Reporter {
|
pub struct Reporter {
|
||||||
errors: RefCell<Vec<OrcErr>>,
|
errors: RefCell<Option<OrcErr>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Reporter {
|
impl Reporter {
|
||||||
pub fn report(&self, e: impl Into<OrcErrv>) { self.errors.borrow_mut().extend(e.into()) }
|
pub fn report(&self, e: impl Into<OrcErr>) {
|
||||||
pub fn new() -> Self { Self { errors: RefCell::new(vec![]) } }
|
match &mut *self.errors.borrow_mut() {
|
||||||
pub fn errv(self) -> Option<OrcErrv> { OrcErrv::new(self.errors.into_inner()).ok() }
|
slot @ None => *slot = Some(e.into()),
|
||||||
|
Some(err) => *err += e.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn new() -> Self { Self { errors: RefCell::new(None) } }
|
||||||
|
pub fn res(self) -> Result<(), OrcErr> {
|
||||||
|
match self.errors.into_inner() {
|
||||||
|
Some(e) => Err(e),
|
||||||
|
None => Ok(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
pub fn merge<T>(self, res: OrcRes<T>) -> OrcRes<T> {
|
pub fn merge<T>(self, res: OrcRes<T>) -> OrcRes<T> {
|
||||||
match (res, self.errv()) {
|
match (res, self.res()) {
|
||||||
(res, None) => res,
|
(res, Ok(())) => res,
|
||||||
(Ok(_), Some(errv)) => Err(errv),
|
(Ok(_), Err(e)) => Err(e),
|
||||||
(Err(e), Some(errv)) => Err(e + errv),
|
(Err(e), Err(e2)) => Err(e + e2),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn is_empty(&self) -> bool { self.errors.borrow().is_empty() }
|
pub fn is_empty(&self) -> bool { self.errors.borrow().is_none() }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for Reporter {
|
impl Default for Reporter {
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
use std::borrow::Borrow;
|
|
||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
use std::convert::Infallible;
|
use std::convert::Infallible;
|
||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
@@ -6,8 +5,7 @@ use std::iter;
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
use futures::future::join_all;
|
use itertools::Itertools;
|
||||||
use itertools::{Itertools, chain};
|
|
||||||
use never::Never;
|
use never::Never;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
|
|
||||||
@@ -47,14 +45,12 @@ impl FmtUnit {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn sequence(
|
pub fn sequence(
|
||||||
head: &str,
|
|
||||||
delim: &str,
|
delim: &str,
|
||||||
tail: &str,
|
|
||||||
seq_bnd: Option<bool>,
|
seq_bnd: Option<bool>,
|
||||||
seq: impl IntoIterator<Item = FmtUnit>,
|
seq: impl IntoIterator<Item = FmtUnit>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let items = seq.into_iter().collect_vec();
|
let items = seq.into_iter().collect_vec();
|
||||||
Variants::default().sequence(items.len(), head, delim, tail, seq_bnd).units_own(items)
|
FmtUnit::new(Variants::sequence(items.len(), delim, seq_bnd), items)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<T> From<T> for FmtUnit
|
impl<T> From<T> for FmtUnit
|
||||||
@@ -81,12 +77,9 @@ impl FmtElement {
|
|||||||
pub fn bounded(i: u32) -> Self { Self::sub(i, Some(true)) }
|
pub fn bounded(i: u32) -> Self { Self::sub(i, Some(true)) }
|
||||||
pub fn unbounded(i: u32) -> Self { Self::sub(i, Some(false)) }
|
pub fn unbounded(i: u32) -> Self { Self::sub(i, Some(false)) }
|
||||||
pub fn last(i: u32) -> Self { Self::sub(i, None) }
|
pub fn last(i: u32) -> Self { Self::sub(i, None) }
|
||||||
pub fn sequence(len: usize, bounded: Option<bool>) -> Vec<Self> {
|
pub fn sequence(len: usize, bounded: Option<bool>) -> impl Iterator<Item = Self> {
|
||||||
match len.try_into().unwrap() {
|
let len32: u32 = len.try_into().unwrap();
|
||||||
0u32 => vec![],
|
(0..len32 - 1).map(FmtElement::unbounded).chain([FmtElement::sub(len32 - 1, bounded)])
|
||||||
1u32 => vec![FmtElement::sub(0, bounded)],
|
|
||||||
n => (0..n - 1).map(FmtElement::unbounded).chain([FmtElement::sub(n - 1, bounded)]).collect(),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
pub fn from_api(api: &api::FormattingElement) -> Self {
|
pub fn from_api(api: &api::FormattingElement) -> Self {
|
||||||
match_mapping!(api, api::FormattingElement => FmtElement {
|
match_mapping!(api, api::FormattingElement => FmtElement {
|
||||||
@@ -112,38 +105,10 @@ pub struct Variant {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn variants_parse_test() {
|
fn variants_parse_test() {
|
||||||
let vars = Rc::new(Variants::default().bounded("({{{0}}})"));
|
let vars = Variants::default().bounded("({0})");
|
||||||
let expected_vars = Rc::new(Variants(vec![Variant {
|
println!("final: {vars:?}")
|
||||||
bounded: true,
|
|
||||||
elements: vec![
|
|
||||||
FmtElement::String(Rc::new("({".to_string())),
|
|
||||||
FmtElement::Sub { bounded: Some(false), slot: 0 },
|
|
||||||
FmtElement::String(Rc::new("})".to_string())),
|
|
||||||
],
|
|
||||||
}]));
|
|
||||||
assert_eq!(vars.as_ref(), expected_vars.as_ref());
|
|
||||||
let unit = vars.units(["1".into()]);
|
|
||||||
assert_eq!(unit, FmtUnit {
|
|
||||||
subs: vec![FmtUnit {
|
|
||||||
subs: vec![],
|
|
||||||
variants: Rc::new(Variants(vec![Variant {
|
|
||||||
bounded: true,
|
|
||||||
elements: vec![FmtElement::String(Rc::new("1".to_string()))]
|
|
||||||
}]))
|
|
||||||
}],
|
|
||||||
variants: expected_vars
|
|
||||||
});
|
|
||||||
let str = take_first(&unit, true);
|
|
||||||
assert_eq!(str, "({1})");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Represents a collection of formatting strings for the same set of parameters
|
|
||||||
/// from which the formatter can choose within their associated constraints.
|
|
||||||
///
|
|
||||||
/// - {0b} can be replaced by any variant of the parameter.
|
|
||||||
/// - {0} can only be replaced by a bounded variant of the parameter
|
|
||||||
/// - {0l} causes the current end restriction to be applied to the parameter.
|
|
||||||
/// This is to be used if the parameter is at the very end of the variant.
|
|
||||||
#[derive(Clone, Debug, Hash, PartialEq, Eq, Default)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Default)]
|
||||||
pub struct Variants(pub Vec<Variant>);
|
pub struct Variants(pub Vec<Variant>);
|
||||||
impl Variants {
|
impl Variants {
|
||||||
@@ -218,40 +183,20 @@ impl Variants {
|
|||||||
fn add(&mut self, bounded: bool, s: &'_ str) {
|
fn add(&mut self, bounded: bool, s: &'_ str) {
|
||||||
self.0.push(Variant { bounded, elements: Self::parse(s) })
|
self.0.push(Variant { bounded, elements: Self::parse(s) })
|
||||||
}
|
}
|
||||||
/// This option is available in all positions.
|
// This option is available in all positions
|
||||||
/// See [Variants] for a description of the format strings
|
|
||||||
pub fn bounded(mut self, s: &'_ str) -> Self {
|
pub fn bounded(mut self, s: &'_ str) -> Self {
|
||||||
self.add(true, s);
|
self.add(true, s);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
/// This option is only available in positions immediately preceding the end
|
// This option is only available in positions immediately preceding the end of
|
||||||
/// of the sequence or a parenthesized subsequence.
|
// the sequence or a parenthesized subsequence.
|
||||||
/// See [Variants] for a description of the format strings
|
|
||||||
pub fn unbounded(mut self, s: &'_ str) -> Self {
|
pub fn unbounded(mut self, s: &'_ str) -> Self {
|
||||||
self.add(false, s);
|
self.add(false, s);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
pub fn sequence(
|
pub fn sequence(len: usize, delim: &str, seq_bnd: Option<bool>) -> Rc<Self> {
|
||||||
mut self,
|
let seq = Itertools::intersperse(FmtElement::sequence(len, seq_bnd), FmtElement::str(delim));
|
||||||
len: usize,
|
Rc::new(Variants(vec![Variant { bounded: true, elements: seq.collect_vec() }]))
|
||||||
head: &str,
|
|
||||||
delim: &str,
|
|
||||||
tail: &str,
|
|
||||||
seq_bnd: Option<bool>,
|
|
||||||
) -> Self {
|
|
||||||
let seq = chain!(
|
|
||||||
[FmtElement::str(head)],
|
|
||||||
Itertools::intersperse(
|
|
||||||
FmtElement::sequence(len, seq_bnd).into_iter(),
|
|
||||||
FmtElement::str(delim),
|
|
||||||
),
|
|
||||||
[FmtElement::str(tail)],
|
|
||||||
);
|
|
||||||
self.0.push(Variant { bounded: true, elements: seq.collect_vec() });
|
|
||||||
self
|
|
||||||
}
|
|
||||||
pub fn units_own(self, subs: impl IntoIterator<Item = FmtUnit>) -> FmtUnit {
|
|
||||||
FmtUnit::new(Rc::new(self), subs)
|
|
||||||
}
|
}
|
||||||
pub fn units(self: &Rc<Self>, subs: impl IntoIterator<Item = FmtUnit>) -> FmtUnit {
|
pub fn units(self: &Rc<Self>, subs: impl IntoIterator<Item = FmtUnit>) -> FmtUnit {
|
||||||
FmtUnit::new(self.clone(), subs)
|
FmtUnit::new(self.clone(), subs)
|
||||||
@@ -333,12 +278,3 @@ impl Format for Never {
|
|||||||
|
|
||||||
/// Format with default strategy. Currently equal to [take_first_fmt]
|
/// Format with default strategy. Currently equal to [take_first_fmt]
|
||||||
pub async fn fmt(v: &(impl Format + ?Sized), i: &Interner) -> String { take_first_fmt(v, i).await }
|
pub async fn fmt(v: &(impl Format + ?Sized), i: &Interner) -> String { take_first_fmt(v, i).await }
|
||||||
/// Format a sequence with default strategy. Currently equal to [take_first_fmt]
|
|
||||||
pub async fn fmt_v<F: Format + ?Sized, R: Borrow<F>>(
|
|
||||||
v: impl IntoIterator<Item = R>,
|
|
||||||
i: &Interner,
|
|
||||||
) -> impl Iterator<Item = String> {
|
|
||||||
join_all(v.into_iter().map(|f| async move { take_first_fmt(f.borrow(), i).await }))
|
|
||||||
.await
|
|
||||||
.into_iter()
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,12 +1,14 @@
|
|||||||
|
use std::any::Any;
|
||||||
use std::borrow::Borrow;
|
use std::borrow::Borrow;
|
||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use std::hash::BuildHasher as _;
|
use std::hash::{BuildHasher as _, Hash};
|
||||||
use std::num::NonZeroU64;
|
use std::num::NonZeroU64;
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use std::sync::atomic;
|
use std::sync::atomic;
|
||||||
use std::{fmt, hash};
|
use std::{fmt, hash};
|
||||||
|
|
||||||
|
use futures::future::LocalBoxFuture;
|
||||||
use futures::lock::Mutex;
|
use futures::lock::Mutex;
|
||||||
use hashbrown::{HashMap, HashSet};
|
use hashbrown::{HashMap, HashSet};
|
||||||
use itertools::Itertools as _;
|
use itertools::Itertools as _;
|
||||||
@@ -15,296 +17,460 @@ use orchid_api_traits::Request;
|
|||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::reqnot::{DynRequester, Requester};
|
use crate::reqnot::{DynRequester, Requester};
|
||||||
|
|
||||||
/// Clippy crashes while verifying `Tok: Sized` without this and I cba to create
|
// /// Clippy crashes while verifying `Tok: Sized` without this and I cba to
|
||||||
/// a minimal example
|
// create /// a minimal example
|
||||||
#[derive(Clone)]
|
// #[derive(Clone)]
|
||||||
struct ForceSized<T>(T);
|
// struct ForceSized<T>(T);
|
||||||
|
|
||||||
#[derive(Clone)]
|
// #[derive(Clone)]
|
||||||
pub struct Tok<T: Interned> {
|
// pub struct Tok<T: Interned> {
|
||||||
data: Rc<T>,
|
// data: Rc<T>,
|
||||||
marker: ForceSized<T::Marker>,
|
// marker: ForceSized<T::Marker>,
|
||||||
}
|
// }
|
||||||
impl<T: Interned> Tok<T> {
|
// impl<T: Interned> Tok<T> {
|
||||||
pub fn new(data: Rc<T>, marker: T::Marker) -> Self { Self { data, marker: ForceSized(marker) } }
|
// pub fn new(data: Rc<T>, marker: T::Marker) -> Self { Self { data, marker:
|
||||||
pub fn to_api(&self) -> T::Marker { self.marker.0 }
|
// ForceSized(marker) } } pub fn to_api(&self) -> T::Marker { self.marker.0 }
|
||||||
pub async fn from_api<M>(marker: M, i: &Interner) -> Self
|
// pub async fn from_api<M>(marker: M, i: &Interner) -> Self
|
||||||
where M: InternMarker<Interned = T> {
|
// where M: InternMarker<Interned = T> {
|
||||||
i.ex(marker).await
|
// i.ex(marker).await
|
||||||
}
|
// }
|
||||||
pub fn rc(&self) -> Rc<T> { self.data.clone() }
|
// pub fn rc(&self) -> Rc<T> { self.data.clone() }
|
||||||
}
|
// }
|
||||||
impl<T: Interned> Deref for Tok<T> {
|
// impl<T: Interned> Deref for Tok<T> {
|
||||||
type Target = T;
|
// type Target = T;
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target { self.data.as_ref() }
|
// fn deref(&self) -> &Self::Target { self.data.as_ref() }
|
||||||
}
|
// }
|
||||||
impl<T: Interned> Ord for Tok<T> {
|
// impl<T: Interned> Ord for Tok<T> {
|
||||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.to_api().cmp(&other.to_api()) }
|
// fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||||
}
|
// self.to_api().cmp(&other.to_api()) } }
|
||||||
impl<T: Interned> PartialOrd for Tok<T> {
|
// impl<T: Interned> PartialOrd for Tok<T> {
|
||||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) }
|
// fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||||
}
|
// Some(self.cmp(other)) } }
|
||||||
impl<T: Interned> Eq for Tok<T> {}
|
// impl<T: Interned> Eq for Tok<T> {}
|
||||||
impl<T: Interned> PartialEq for Tok<T> {
|
// impl<T: Interned> PartialEq for Tok<T> {
|
||||||
fn eq(&self, other: &Self) -> bool { self.cmp(other).is_eq() }
|
// fn eq(&self, other: &Self) -> bool { self.cmp(other).is_eq() }
|
||||||
}
|
// }
|
||||||
impl<T: Interned> hash::Hash for Tok<T> {
|
// impl<T: Interned> hash::Hash for Tok<T> {
|
||||||
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.to_api().hash(state) }
|
// fn hash<H: hash::Hasher>(&self, state: &mut H) { self.to_api().hash(state) }
|
||||||
}
|
// }
|
||||||
impl<T: Interned + fmt::Display> fmt::Display for Tok<T> {
|
// impl<T: Interned + fmt::Display> fmt::Display for Tok<T> {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
write!(f, "{}", &*self.data)
|
// write!(f, "{}", &*self.data)
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
impl<T: Interned + fmt::Debug> fmt::Debug for Tok<T> {
|
// impl<T: Interned + fmt::Debug> fmt::Debug for Tok<T> {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
write!(f, "Token({} -> {:?})", self.to_api().get_id(), self.data.as_ref())
|
// write!(f, "Token({} -> {:?})", self.to_api().get_id(), self.data.as_ref())
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
||||||
pub trait Interned: Eq + hash::Hash + Clone + fmt::Debug + Internable<Interned = Self> {
|
// pub trait Interned: Eq + hash::Hash + Clone + fmt::Debug +
|
||||||
type Marker: InternMarker<Interned = Self> + Sized;
|
// Internable<Interned = Self> { type Marker: InternMarker<Interned = Self> +
|
||||||
fn intern(
|
// Sized; fn intern(
|
||||||
self: Rc<Self>,
|
// self: Rc<Self>,
|
||||||
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
|
// req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
|
||||||
) -> impl Future<Output = Self::Marker>;
|
// ) -> impl Future<Output = Self::Marker>;
|
||||||
fn bimap(interner: &mut TypedInterners) -> &mut Bimap<Self>;
|
// fn bimap(interner: &mut TypedInterners) -> &mut Bimap<Self>;
|
||||||
}
|
// }
|
||||||
|
|
||||||
pub trait Internable: fmt::Debug {
|
// pub trait Internable: fmt::Debug {
|
||||||
type Interned: Interned;
|
// type Interned: Interned;
|
||||||
fn get_owned(&self) -> Rc<Self::Interned>;
|
// fn get_owned(&self) -> Rc<Self::Interned>;
|
||||||
}
|
// }
|
||||||
|
|
||||||
pub trait InternMarker: Copy + PartialEq + Eq + PartialOrd + Ord + hash::Hash + Sized {
|
// pub trait InternMarker: Copy + PartialEq + Eq + PartialOrd + Ord + hash::Hash
|
||||||
type Interned: Interned<Marker = Self>;
|
// + Sized { type Interned: Interned<Marker = Self>;
|
||||||
/// Only called on replicas
|
// /// Only called on replicas
|
||||||
fn resolve(self, i: &Interner) -> impl Future<Output = Tok<Self::Interned>>;
|
// fn resolve(self, i: &Interner) -> impl Future<Output = Tok<Self::Interned>>;
|
||||||
fn get_id(self) -> NonZeroU64;
|
// fn get_id(self) -> NonZeroU64;
|
||||||
fn from_id(id: NonZeroU64) -> Self;
|
// fn from_id(id: NonZeroU64) -> Self;
|
||||||
}
|
// }
|
||||||
|
|
||||||
impl Interned for String {
|
// impl Interned for String {
|
||||||
type Marker = api::TStr;
|
// type Marker = api::TStr;
|
||||||
async fn intern(
|
// async fn intern(
|
||||||
self: Rc<Self>,
|
// self: Rc<Self>,
|
||||||
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
|
// req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
|
||||||
) -> Self::Marker {
|
// ) -> Self::Marker {
|
||||||
req.request(api::InternStr(self.to_string())).await
|
// req.request(api::InternStr(self.to_string())).await
|
||||||
}
|
// }
|
||||||
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.strings }
|
// fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut
|
||||||
}
|
// interners.strings } }
|
||||||
impl InternMarker for api::TStr {
|
// impl InternMarker for api::TStr {
|
||||||
type Interned = String;
|
// type Interned = String;
|
||||||
async fn resolve(self, i: &Interner) -> Tok<Self::Interned> {
|
// async fn resolve(self, i: &Interner) -> Tok<Self::Interned> {
|
||||||
Tok::new(Rc::new(i.0.master.as_ref().unwrap().request(api::ExternStr(self)).await), self)
|
// Tok::new(Rc::new(i.0.master.as_ref().unwrap().
|
||||||
}
|
// request(api::ExternStr(self)).await), self) }
|
||||||
fn get_id(self) -> NonZeroU64 { self.0 }
|
// fn get_id(self) -> NonZeroU64 { self.0 }
|
||||||
fn from_id(id: NonZeroU64) -> Self { Self(id) }
|
// fn from_id(id: NonZeroU64) -> Self { Self(id) }
|
||||||
}
|
// }
|
||||||
impl Internable for str {
|
// impl Internable for str {
|
||||||
type Interned = String;
|
// type Interned = String;
|
||||||
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_string()) }
|
// fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_string()) }
|
||||||
}
|
// }
|
||||||
impl Internable for String {
|
// impl Internable for String {
|
||||||
type Interned = String;
|
// type Interned = String;
|
||||||
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_string()) }
|
// fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_string()) }
|
||||||
}
|
// }
|
||||||
|
|
||||||
impl Interned for Vec<Tok<String>> {
|
// impl Interned for Vec<IStr> {
|
||||||
type Marker = api::TStrv;
|
// type Marker = api::TStrv;
|
||||||
async fn intern(
|
// async fn intern(
|
||||||
self: Rc<Self>,
|
// self: Rc<Self>,
|
||||||
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
|
// req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
|
||||||
) -> Self::Marker {
|
// ) -> Self::Marker {
|
||||||
req.request(api::InternStrv(self.iter().map(|t| t.to_api()).collect())).await
|
// req.request(api::InternStrv(self.iter().map(|t|
|
||||||
}
|
// t.to_api()).collect())).await }
|
||||||
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.vecs }
|
// fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut
|
||||||
}
|
// interners.vecs } }
|
||||||
impl InternMarker for api::TStrv {
|
// impl InternMarker for api::TStrv {
|
||||||
type Interned = Vec<Tok<String>>;
|
// type Interned = Vec<IStr>;
|
||||||
async fn resolve(self, i: &Interner) -> Tok<Self::Interned> {
|
// async fn resolve(self, i: &Interner) -> Tok<Self::Interned> {
|
||||||
let rep = i.0.master.as_ref().unwrap().request(api::ExternStrv(self)).await;
|
// let rep =
|
||||||
let data = futures::future::join_all(rep.into_iter().map(|m| i.ex(m))).await;
|
// i.0.master.as_ref().unwrap().request(api::ExternStrv(self)).await; let data
|
||||||
Tok::new(Rc::new(data), self)
|
// = futures::future::join_all(rep.into_iter().map(|m| i.ex(m))).await;
|
||||||
}
|
// Tok::new(Rc::new(data), self)
|
||||||
fn get_id(self) -> NonZeroU64 { self.0 }
|
// }
|
||||||
fn from_id(id: NonZeroU64) -> Self { Self(id) }
|
// fn get_id(self) -> NonZeroU64 { self.0 }
|
||||||
}
|
// fn from_id(id: NonZeroU64) -> Self { Self(id) }
|
||||||
impl Internable for [Tok<String>] {
|
// }
|
||||||
type Interned = Vec<Tok<String>>;
|
// impl Internable for [IStr] {
|
||||||
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
|
// type Interned = Vec<IStr>;
|
||||||
}
|
// fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
|
||||||
impl<const N: usize> Internable for [Tok<String>; N] {
|
// }
|
||||||
type Interned = Vec<Tok<String>>;
|
// impl<const N: usize> Internable for [IStr; N] {
|
||||||
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
|
// type Interned = Vec<IStr>;
|
||||||
}
|
// fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
|
||||||
impl Internable for Vec<Tok<String>> {
|
// }
|
||||||
type Interned = Vec<Tok<String>>;
|
// impl Internable for Vec<IStr> {
|
||||||
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
|
// type Interned = Vec<IStr>;
|
||||||
}
|
// fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
|
||||||
|
// }
|
||||||
// impl Internable for Vec<api::TStr> {
|
// impl Internable for Vec<api::TStr> {
|
||||||
// type Interned = Vec<Tok<String>>;
|
// type Interned = Vec<IStr>;
|
||||||
// fn get_owned(&self) -> Arc<Self::Interned> {
|
// fn get_owned(&self) -> Arc<Self::Interned> {
|
||||||
// Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
|
// Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
|
||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
// impl Internable for [api::TStr] {
|
// impl Internable for [api::TStr] {
|
||||||
// type Interned = Vec<Tok<String>>;
|
// type Interned = Vec<IStr>;
|
||||||
// fn get_owned(&self) -> Arc<Self::Interned> {
|
// fn get_owned(&self) -> Arc<Self::Interned> {
|
||||||
// Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
|
// Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
|
||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
|
|
||||||
/// The number of references held to any token by the interner.
|
macro_rules! token_def {
|
||||||
const BASE_RC: usize = 3;
|
($type:ident, $trait:ident, $deref:ty, $api_repr:ty, $type_name:expr) => {
|
||||||
|
#[derive(Clone)]
|
||||||
#[test]
|
pub struct $type(Rc<dyn $trait>);
|
||||||
fn base_rc_correct() {
|
impl $type {
|
||||||
let tok = Tok::new(Rc::new("foo".to_string()), api::TStr(1.try_into().unwrap()));
|
pub fn new<T: $trait + 'static>(t: T) -> Self { Self(Rc::new(t) as _) }
|
||||||
let mut bimap = Bimap::default();
|
pub fn to_api(&self) -> $api_repr { self.0.to_api() }
|
||||||
bimap.insert(tok.clone());
|
pub fn inner(&self) -> &dyn Any { self.0.as_ref() }
|
||||||
assert_eq!(Rc::strong_count(&tok.data), BASE_RC + 1, "the bimap plus the current instance");
|
fn addr(&self) -> usize { Rc::as_ptr(&self.0).addr() }
|
||||||
}
|
}
|
||||||
|
pub trait $trait: Deref<Target = $deref> + Any {
|
||||||
pub struct Bimap<T: Interned> {
|
fn to_api(&self) -> $api_repr;
|
||||||
intern: HashMap<Rc<T>, Tok<T>>,
|
|
||||||
by_id: HashMap<T::Marker, Tok<T>>,
|
|
||||||
}
|
}
|
||||||
impl<T: Interned> Bimap<T> {
|
impl Deref for $type {
|
||||||
pub fn insert(&mut self, token: Tok<T>) {
|
type Target = $deref;
|
||||||
self.intern.insert(token.data.clone(), token.clone());
|
fn deref(&self) -> &Self::Target { self.0.deref() }
|
||||||
self.by_id.insert(token.to_api(), token);
|
|
||||||
}
|
}
|
||||||
|
impl Eq for $type {}
|
||||||
pub fn by_marker(&self, marker: T::Marker) -> Option<Tok<T>> { self.by_id.get(&marker).cloned() }
|
impl PartialEq for $type {
|
||||||
|
fn eq(&self, other: &Self) -> bool { Rc::ptr_eq(&self.0, &other.0) }
|
||||||
pub fn by_value<Q: Eq + hash::Hash>(&self, q: &Q) -> Option<Tok<T>>
|
|
||||||
where T: Borrow<Q> {
|
|
||||||
(self.intern.raw_entry())
|
|
||||||
.from_hash(self.intern.hasher().hash_one(q), |k| k.as_ref().borrow() == q)
|
|
||||||
.map(|p| p.1.clone())
|
|
||||||
}
|
}
|
||||||
|
impl Ord for $type {
|
||||||
pub fn sweep_replica(&mut self) -> Vec<T::Marker> {
|
fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.addr().cmp(&other.addr()) }
|
||||||
(self.intern)
|
|
||||||
.extract_if(|k, _| Rc::strong_count(k) == BASE_RC)
|
|
||||||
.map(|(_, v)| {
|
|
||||||
self.by_id.remove(&v.to_api());
|
|
||||||
v.to_api()
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
}
|
||||||
|
impl PartialOrd for $type {
|
||||||
pub fn sweep_master(&mut self, retained: HashSet<T::Marker>) {
|
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) }
|
||||||
self.intern.retain(|k, v| BASE_RC < Rc::strong_count(k) || retained.contains(&v.to_api()))
|
|
||||||
}
|
}
|
||||||
|
impl Hash for $type {
|
||||||
|
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.addr().hash(state) }
|
||||||
}
|
}
|
||||||
|
impl std::fmt::Debug for $type {
|
||||||
impl<T: Interned> Default for Bimap<T> {
|
|
||||||
fn default() -> Self { Self { by_id: HashMap::new(), intern: HashMap::new() } }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait UpComm {
|
|
||||||
fn up<R: Request>(&self, req: R) -> R::Response;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct TypedInterners {
|
|
||||||
strings: Bimap<String>,
|
|
||||||
vecs: Bimap<Vec<Tok<String>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct InternerData {
|
|
||||||
interners: Mutex<TypedInterners>,
|
|
||||||
master: Option<Box<dyn DynRequester<Transfer = api::IntReq>>>,
|
|
||||||
}
|
|
||||||
#[derive(Clone, Default)]
|
|
||||||
pub struct Interner(Rc<InternerData>);
|
|
||||||
impl Interner {
|
|
||||||
pub fn new_master() -> Self { Self::default() }
|
|
||||||
pub fn new_replica(req: impl DynRequester<Transfer = api::IntReq> + 'static) -> Self {
|
|
||||||
Self(Rc::new(InternerData { master: Some(Box::new(req)), interners: Mutex::default() }))
|
|
||||||
}
|
|
||||||
/// Intern some data; query its identifier if not known locally
|
|
||||||
pub async fn i<T: Interned>(&self, t: &(impl Internable<Interned = T> + ?Sized)) -> Tok<T> {
|
|
||||||
let data = t.get_owned();
|
|
||||||
let mut g = self.0.interners.lock().await;
|
|
||||||
let typed = T::bimap(&mut g);
|
|
||||||
if let Some(tok) = typed.by_value(&data) {
|
|
||||||
return tok;
|
|
||||||
}
|
|
||||||
let marker = match &self.0.master {
|
|
||||||
Some(c) => data.clone().intern(&**c).await,
|
|
||||||
None =>
|
|
||||||
T::Marker::from_id(NonZeroU64::new(ID.fetch_add(1, atomic::Ordering::Relaxed)).unwrap()),
|
|
||||||
};
|
|
||||||
let tok = Tok::new(data, marker);
|
|
||||||
T::bimap(&mut g).insert(tok.clone());
|
|
||||||
tok
|
|
||||||
}
|
|
||||||
/// Extern an identifier; query the data it represents if not known locally
|
|
||||||
pub async fn ex<M: InternMarker>(&self, marker: M) -> Tok<M::Interned> {
|
|
||||||
if let Some(tok) = M::Interned::bimap(&mut *self.0.interners.lock().await).by_marker(marker) {
|
|
||||||
return tok;
|
|
||||||
}
|
|
||||||
assert!(self.0.master.is_some(), "ID not in local interner and this is master");
|
|
||||||
let token = marker.resolve(self).await;
|
|
||||||
M::Interned::bimap(&mut *self.0.interners.lock().await).insert(token.clone());
|
|
||||||
token
|
|
||||||
}
|
|
||||||
pub async fn sweep_replica(&self) -> api::Retained {
|
|
||||||
assert!(self.0.master.is_some(), "Not a replica");
|
|
||||||
let mut g = self.0.interners.lock().await;
|
|
||||||
api::Retained { strings: g.strings.sweep_replica(), vecs: g.vecs.sweep_replica() }
|
|
||||||
}
|
|
||||||
pub async fn sweep_master(&self, retained: api::Retained) {
|
|
||||||
assert!(self.0.master.is_none(), "Not master");
|
|
||||||
let mut g = self.0.interners.lock().await;
|
|
||||||
g.strings.sweep_master(retained.strings.into_iter().collect());
|
|
||||||
g.vecs.sweep_master(retained.vecs.into_iter().collect());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl fmt::Debug for Interner {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
write!(f, "Interner{{ replica: {} }}", self.0.master.is_none())
|
f.debug_tuple($type_name).field(&self.to_api().0).field(&self.deref()).finish()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
};
|
||||||
static ID: atomic::AtomicU64 = atomic::AtomicU64::new(1);
|
|
||||||
|
|
||||||
pub fn merge_retained(into: &mut api::Retained, from: &api::Retained) {
|
|
||||||
into.strings = into.strings.iter().chain(&from.strings).copied().unique().collect();
|
|
||||||
into.vecs = into.vecs.iter().chain(&from.vecs).copied().unique().collect();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
token_def!(IStr, IStrDyn, str, api::TStr, "IStr");
|
||||||
mod test {
|
impl std::fmt::Display for IStr {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.deref()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
token_def!(IVec, IVecDyn, [IStr], api::TVec, "IVec");
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Interner(Rc<dyn InternerDyn>);
|
||||||
|
impl Interner {
|
||||||
|
pub fn new<T: InternerDyn + 'static>(t: T) -> Self { Self(Rc::new(t) as _) }
|
||||||
|
pub async fn is(&self, s: &(impl Borrow<str> + ?Sized)) -> IStr {
|
||||||
|
IStr(self.0.is(s.borrow()).await)
|
||||||
|
}
|
||||||
|
pub async fn iv(&self, s: &(impl Borrow<[IStr]> + ?Sized)) -> IVec {
|
||||||
|
IVec(self.0.iv(s.borrow()).await)
|
||||||
|
}
|
||||||
|
pub async fn es(&self, m: api::TStr) -> IStr { IStr(self.0.es(m).await) }
|
||||||
|
pub async fn ev(&self, m: api::TVec) -> IVec { IVec(self.0.ev(m).await) }
|
||||||
|
}
|
||||||
|
pub trait InternerDyn {
|
||||||
|
fn is(&self, s: &str) -> LocalBoxFuture<Rc<dyn IStrDyn>>;
|
||||||
|
fn iv(&self, v: &[IStr]) -> LocalBoxFuture<Rc<dyn IVecDyn>>;
|
||||||
|
fn es(&self, m: api::TStr) -> LocalBoxFuture<Rc<dyn IStrDyn>>;
|
||||||
|
fn ev(&self, m: api::TVec) -> LocalBoxFuture<Rc<dyn IVecDyn>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(feature = "mocks", test))]
|
||||||
|
pub mod test {
|
||||||
|
|
||||||
|
use std::cell::RefCell;
|
||||||
use std::num::NonZero;
|
use std::num::NonZero;
|
||||||
use std::pin::Pin;
|
use std::sync::atomic::AtomicU64;
|
||||||
|
|
||||||
use orchid_api_traits::{Decode, enc_vec};
|
|
||||||
use test_executors::spin_on;
|
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::api;
|
use crate::testing::AsyncMonitor;
|
||||||
|
|
||||||
#[test]
|
pub(crate) struct DummyIStr(NonZeroU64, String);
|
||||||
fn test_i() {
|
impl Deref for DummyIStr {
|
||||||
let i = Interner::new_master();
|
type Target = str;
|
||||||
let _: Tok<String> = spin_on(i.i("foo"));
|
fn deref(&self) -> &Self::Target { &self.1 }
|
||||||
let _: Tok<Vec<Tok<String>>> = spin_on(i.i(&[spin_on(i.i("bar")), spin_on(i.i("baz"))]));
|
}
|
||||||
|
impl IStrDyn for DummyIStr {
|
||||||
|
fn to_api(&self) -> api::TStr { api::TStr(self.0) }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
pub(crate) struct DummyIStrv(NonZeroU64, Vec<IStr>);
|
||||||
fn test_coding() {
|
impl Deref for DummyIStrv {
|
||||||
spin_on(async {
|
type Target = [IStr];
|
||||||
let coded = api::TStr(NonZero::new(3u64).unwrap());
|
fn deref(&self) -> &Self::Target { &self.1 }
|
||||||
let mut enc = &enc_vec(&coded).await[..];
|
}
|
||||||
api::TStr::decode(Pin::new(&mut enc)).await;
|
impl IVecDyn for DummyIStrv {
|
||||||
assert_eq!(enc, [], "Did not consume all of {enc:?}")
|
fn to_api(&self) -> api::TVec { api::TVec(self.0) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) struct DummyInterner(
|
||||||
|
RefCell<(
|
||||||
|
HashMap<String, NonZeroU64>,
|
||||||
|
HashMap<NonZeroU64, Rc<DummyIStr>>,
|
||||||
|
HashMap<NonZeroU64, Rc<DummyIStrv>>,
|
||||||
|
)>,
|
||||||
|
AsyncMonitor<InternerEvent>,
|
||||||
|
);
|
||||||
|
pub enum InternerEvent {
|
||||||
|
ExternStr(Rc<DummyIStr>),
|
||||||
|
ExternVec(Rc<DummyIStrv>),
|
||||||
|
InternStr { token: Rc<DummyIStr>, new: bool },
|
||||||
|
InternVec { token: Rc<DummyIStrv>, new: bool },
|
||||||
|
}
|
||||||
|
impl DummyInterner {
|
||||||
|
pub fn new(monitor: AsyncMonitor<InternerEvent>) -> Interner {
|
||||||
|
Interner(Rc::new(Self(RefCell::default(), monitor)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl InternerDyn for DummyInterner {
|
||||||
|
fn es(&self, m: api::TStr) -> LocalBoxFuture<Rc<dyn IStrDyn>> {
|
||||||
|
let state = self.0.borrow();
|
||||||
|
let istr = state.1.get(&m.0).unwrap_or_else(|| panic!("Externed nonexistent {m:?}")).clone();
|
||||||
|
Box::pin(async {
|
||||||
|
self.1.notify(InternerEvent::ExternStr(istr.clone())).await;
|
||||||
|
istr as Rc<dyn IStrDyn>
|
||||||
|
})
|
||||||
|
}
|
||||||
|
fn ev(&self, m: api::TVec) -> LocalBoxFuture<Rc<dyn IVecDyn>> {
|
||||||
|
let state = self.0.borrow();
|
||||||
|
let ivec = state.2.get(&m.0).unwrap_or_else(|| panic!("Externed nonexistent {m:?}")).clone();
|
||||||
|
Box::pin(async {
|
||||||
|
self.1.notify(InternerEvent::ExternVec(ivec.clone())).await;
|
||||||
|
ivec as Rc<dyn IVecDyn>
|
||||||
|
})
|
||||||
|
}
|
||||||
|
fn is(&self, s: &str) -> LocalBoxFuture<Rc<dyn IStrDyn>> {
|
||||||
|
let mut this = self.0.borrow_mut();
|
||||||
|
let id = *(this.0.entry(format!("{s:?}")))
|
||||||
|
.or_insert_with(|| NonZero::new(COUNTER.fetch_add(1, atomic::Ordering::Relaxed)).unwrap());
|
||||||
|
let (tok, new) = match this.1.entry(id) {
|
||||||
|
hashbrown::hash_map::Entry::Occupied(ent) => (ent.get().clone(), false),
|
||||||
|
hashbrown::hash_map::Entry::Vacant(ent) =>
|
||||||
|
(ent.insert(Rc::new(DummyIStr(id, s.to_string()))).clone(), true),
|
||||||
|
};
|
||||||
|
Box::pin(async move {
|
||||||
|
self.1.notify(InternerEvent::InternStr { token: tok.clone(), new }).await;
|
||||||
|
tok as _
|
||||||
|
})
|
||||||
|
}
|
||||||
|
fn iv(&self, s: &[IStr]) -> LocalBoxFuture<Rc<dyn IVecDyn>> {
|
||||||
|
let mut this = self.0.borrow_mut();
|
||||||
|
let id = *(this.0.entry(format!("{s:?}")))
|
||||||
|
.or_insert_with(|| NonZero::new(COUNTER.fetch_add(1, atomic::Ordering::Relaxed)).unwrap());
|
||||||
|
let (tok, new) = match this.2.entry(id) {
|
||||||
|
hashbrown::hash_map::Entry::Occupied(ent) => (ent.get().clone(), false),
|
||||||
|
hashbrown::hash_map::Entry::Vacant(ent) =>
|
||||||
|
(ent.insert(Rc::new(DummyIStrv(id, s.to_vec()))).clone(), true),
|
||||||
|
};
|
||||||
|
Box::pin(async move {
|
||||||
|
self.1.notify(InternerEvent::InternVec { token: tok.clone(), new }).await;
|
||||||
|
tok as _
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
static COUNTER: AtomicU64 = AtomicU64::new(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// /// The number of references held to any token by the interner.
|
||||||
|
// const BASE_RC: usize = 3;
|
||||||
|
|
||||||
|
// #[test]
|
||||||
|
// fn base_rc_correct() {
|
||||||
|
// let tok = Tok::new(Rc::new("foo".to_string()),
|
||||||
|
// api::TStr(1.try_into().unwrap())); let mut bimap = Bimap::default();
|
||||||
|
// bimap.insert(tok.clone());
|
||||||
|
// assert_eq!(Rc::strong_count(&tok.data), BASE_RC + 1, "the bimap plus the
|
||||||
|
// current instance"); }
|
||||||
|
|
||||||
|
// pub struct Bimap<K, V, Tok> {
|
||||||
|
// intern: HashMap<V, Tok>,
|
||||||
|
// by_id: HashMap<K, Tok>,
|
||||||
|
// }
|
||||||
|
// impl<K> Bimap<T> {
|
||||||
|
// pub fn insert(&mut self, token: Tok<T>) {
|
||||||
|
// self.intern.insert(token.data.clone(), token.clone());
|
||||||
|
// self.by_id.insert(token.to_api(), token);
|
||||||
|
// }
|
||||||
|
|
||||||
|
// pub fn by_marker(&self, marker: T::Marker) -> Option<Tok<T>> {
|
||||||
|
// self.by_id.get(&marker).cloned() }
|
||||||
|
|
||||||
|
// pub fn by_value<Q: Eq + hash::Hash>(&self, q: &Q) -> Option<Tok<T>>
|
||||||
|
// where T: Borrow<Q> {
|
||||||
|
// (self.intern.raw_entry())
|
||||||
|
// .from_hash(self.intern.hasher().hash_one(q), |k| k.as_ref().borrow() == q)
|
||||||
|
// .map(|p| p.1.clone())
|
||||||
|
// }
|
||||||
|
|
||||||
|
// pub fn sweep_replica(&mut self) -> Vec<T::Marker> {
|
||||||
|
// (self.intern)
|
||||||
|
// .extract_if(|k, _| Rc::strong_count(k) == BASE_RC)
|
||||||
|
// .map(|(_, v)| {
|
||||||
|
// self.by_id.remove(&v.to_api());
|
||||||
|
// v.to_api()
|
||||||
|
// })
|
||||||
|
// .collect()
|
||||||
|
// }
|
||||||
|
|
||||||
|
// pub fn sweep_master(&mut self, retained: HashSet<T::Marker>) {
|
||||||
|
// self.intern.retain(|k, v| BASE_RC < Rc::strong_count(k) ||
|
||||||
|
// retained.contains(&v.to_api())) }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// impl<T: Interned> Default for Bimap<T> {
|
||||||
|
// fn default() -> Self { Self { by_id: HashMap::new(), intern: HashMap::new()
|
||||||
|
// } } }
|
||||||
|
|
||||||
|
// pub trait UpComm {
|
||||||
|
// fn up<R: Request>(&self, req: R) -> R::Response;
|
||||||
|
// }
|
||||||
|
|
||||||
|
// #[derive(Default)]
|
||||||
|
// pub struct TypedInterners {
|
||||||
|
// strings: Bimap<String>,
|
||||||
|
// vecs: Bimap<Vec<IStr>>,
|
||||||
|
// }
|
||||||
|
|
||||||
|
// #[derive(Default)]
|
||||||
|
// pub struct InternerData {
|
||||||
|
// interners: Mutex<TypedInterners>,
|
||||||
|
// master: Option<Box<dyn DynRequester<Transfer = api::IntReq>>>,
|
||||||
|
// }
|
||||||
|
// #[derive(Clone, Default)]
|
||||||
|
// pub struct Interner(Rc<InternerData>);
|
||||||
|
// impl Interner {
|
||||||
|
// pub fn new_master() -> Self { Self::default() }
|
||||||
|
// pub fn new_replica(req: impl DynRequester<Transfer = api::IntReq> + 'static)
|
||||||
|
// -> Self { Self(Rc::new(InternerData { master: Some(Box::new(req)),
|
||||||
|
// interners: Mutex::default() })) }
|
||||||
|
// /// Intern some data; query its identifier if not known locally
|
||||||
|
// pub async fn i<T: Interned>(&self, t: &(impl Internable<Interned = T> +
|
||||||
|
// ?Sized)) -> Tok<T> { let data = t.get_owned();
|
||||||
|
// let mut g = self.0.interners.lock().await;
|
||||||
|
// let typed = T::bimap(&mut g);
|
||||||
|
// if let Some(tok) = typed.by_value(&data) {
|
||||||
|
// return tok;
|
||||||
|
// }
|
||||||
|
// let marker = match &self.0.master {
|
||||||
|
// Some(c) => data.clone().intern(&**c).await,
|
||||||
|
// None =>
|
||||||
|
// T::Marker::from_id(NonZeroU64::new(ID.fetch_add(1,
|
||||||
|
// atomic::Ordering::Relaxed)).unwrap()), };
|
||||||
|
// let tok = Tok::new(data, marker);
|
||||||
|
// T::bimap(&mut g).insert(tok.clone());
|
||||||
|
// tok
|
||||||
|
// }
|
||||||
|
// /// Extern an identifier; query the data it represents if not known locally
|
||||||
|
// pub async fn ex<M: InternMarker>(&self, marker: M) -> Tok<M::Interned> {
|
||||||
|
// if let Some(tok) = M::Interned::bimap(&mut
|
||||||
|
// *self.0.interners.lock().await).by_marker(marker) { return tok;
|
||||||
|
// }
|
||||||
|
// assert!(self.0.master.is_some(), "ID not in local interner and this is
|
||||||
|
// master"); let token = marker.resolve(self).await;
|
||||||
|
// M::Interned::bimap(&mut
|
||||||
|
// *self.0.interners.lock().await).insert(token.clone()); token
|
||||||
|
// }
|
||||||
|
// pub async fn sweep_replica(&self) -> api::Retained {
|
||||||
|
// assert!(self.0.master.is_some(), "Not a replica");
|
||||||
|
// let mut g = self.0.interners.lock().await;
|
||||||
|
// api::Retained { strings: g.strings.sweep_replica(), vecs:
|
||||||
|
// g.vecs.sweep_replica() } }
|
||||||
|
// pub async fn sweep_master(&self, retained: api::Retained) {
|
||||||
|
// assert!(self.0.master.is_none(), "Not master");
|
||||||
|
// let mut g = self.0.interners.lock().await;
|
||||||
|
// g.strings.sweep_master(retained.strings.into_iter().collect());
|
||||||
|
// g.vecs.sweep_master(retained.vecs.into_iter().collect());
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// impl fmt::Debug for Interner {
|
||||||
|
// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
// write!(f, "Interner{{ replica: {} }}", self.0.master.is_none())
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// static ID: atomic::AtomicU64 = atomic::AtomicU64::new(1);
|
||||||
|
|
||||||
|
// pub fn merge_retained(into: &mut api::Retained, from: &api::Retained) {
|
||||||
|
// into.strings =
|
||||||
|
// into.strings.iter().chain(&from.strings).copied().unique().collect();
|
||||||
|
// into.vecs = into.vecs.iter().chain(&from.vecs).copied().unique().collect();
|
||||||
|
// }
|
||||||
|
|
||||||
|
// #[cfg(test)]
|
||||||
|
// mod test {
|
||||||
|
// use std::num::NonZero;
|
||||||
|
// use std::pin::Pin;
|
||||||
|
|
||||||
|
// use orchid_api_traits::{Decode, enc_vec};
|
||||||
|
// use test_executors::spin_on;
|
||||||
|
|
||||||
|
// use super::*;
|
||||||
|
// use crate::api;
|
||||||
|
|
||||||
|
// #[test]
|
||||||
|
// fn test_i() {
|
||||||
|
// let i = Interner::new_master();
|
||||||
|
// let _: IStr = spin_on(i.i("foo"));
|
||||||
|
// let _: Tok<Vec<IStr>> = spin_on(i.i(&[spin_on(i.i("bar")),
|
||||||
|
// spin_on(i.i("baz"))])); }
|
||||||
|
|
||||||
|
// #[test]
|
||||||
|
// fn test_coding() {
|
||||||
|
// spin_on(async {
|
||||||
|
// let coded = api::TStr(NonZero::new(3u64).unwrap());
|
||||||
|
// let mut enc = &enc_vec(&coded).await[..];
|
||||||
|
// api::TStr::decode(Pin::new(&mut enc)).await;
|
||||||
|
// assert_eq!(enc, [], "Did not consume all of {enc:?}")
|
||||||
|
// })
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ pub mod builtin;
|
|||||||
pub mod char_filter;
|
pub mod char_filter;
|
||||||
pub mod clone;
|
pub mod clone;
|
||||||
pub mod combine;
|
pub mod combine;
|
||||||
|
pub mod ctx;
|
||||||
pub mod error;
|
pub mod error;
|
||||||
pub mod event;
|
pub mod event;
|
||||||
pub mod format;
|
pub mod format;
|
||||||
@@ -25,6 +26,7 @@ pub mod pure_seq;
|
|||||||
pub mod reqnot;
|
pub mod reqnot;
|
||||||
pub mod sequence;
|
pub mod sequence;
|
||||||
pub mod side;
|
pub mod side;
|
||||||
|
pub mod testing;
|
||||||
mod tl_cache;
|
mod tl_cache;
|
||||||
pub mod tokens;
|
pub mod tokens;
|
||||||
pub mod tree;
|
pub mod tree;
|
||||||
|
|||||||
@@ -2,18 +2,17 @@
|
|||||||
|
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::hash::Hash;
|
use std::hash::Hash;
|
||||||
use std::ops::{Add, AddAssign, Range};
|
use std::ops::Range;
|
||||||
|
|
||||||
use futures::future::join_all;
|
|
||||||
use trait_set::trait_set;
|
use trait_set::trait_set;
|
||||||
|
|
||||||
use crate::error::ErrPos;
|
use crate::error::ErrPos;
|
||||||
use crate::interner::{Interner, Tok};
|
use crate::interner::{IStr, Interner};
|
||||||
use crate::name::Sym;
|
use crate::name::Sym;
|
||||||
use crate::{api, match_mapping, sym};
|
use crate::{api, match_mapping, sym};
|
||||||
|
|
||||||
trait_set! {
|
trait_set! {
|
||||||
pub trait GetSrc = FnMut(&Sym) -> Tok<String>;
|
pub trait GetSrc = FnMut(&Sym) -> IStr;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
@@ -26,7 +25,6 @@ pub enum Pos {
|
|||||||
Gen(CodeGenInfo),
|
Gen(CodeGenInfo),
|
||||||
/// Range and file
|
/// Range and file
|
||||||
SrcRange(SrcRange),
|
SrcRange(SrcRange),
|
||||||
Multi(Vec<Pos>),
|
|
||||||
}
|
}
|
||||||
impl Pos {
|
impl Pos {
|
||||||
pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String {
|
pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String {
|
||||||
@@ -41,7 +39,6 @@ impl Pos {
|
|||||||
match_mapping!(api, api::Location => Pos {
|
match_mapping!(api, api::Location => Pos {
|
||||||
None, Inherit, SlotTarget,
|
None, Inherit, SlotTarget,
|
||||||
Gen(cgi => CodeGenInfo::from_api(cgi, i).await),
|
Gen(cgi => CodeGenInfo::from_api(cgi, i).await),
|
||||||
Multi(v => join_all(v.iter().map(|l| Pos::from_api(l, i))).await)
|
|
||||||
} {
|
} {
|
||||||
api::Location::SourceRange(sr) => Self::SrcRange(SrcRange::from_api(sr, i).await)
|
api::Location::SourceRange(sr) => Self::SrcRange(SrcRange::from_api(sr, i).await)
|
||||||
})
|
})
|
||||||
@@ -50,7 +47,6 @@ impl Pos {
|
|||||||
match_mapping!(self, Pos => api::Location {
|
match_mapping!(self, Pos => api::Location {
|
||||||
None, Inherit, SlotTarget,
|
None, Inherit, SlotTarget,
|
||||||
Gen(cgi.to_api()),
|
Gen(cgi.to_api()),
|
||||||
Multi(v => v.iter().map(|pos| pos.to_api()).collect()),
|
|
||||||
} {
|
} {
|
||||||
Self::SrcRange(sr) => api::Location::SourceRange(sr.to_api()),
|
Self::SrcRange(sr) => api::Location::SourceRange(sr.to_api()),
|
||||||
})
|
})
|
||||||
@@ -64,34 +60,7 @@ impl fmt::Display for Pos {
|
|||||||
Pos::None => f.write_str("N/A"),
|
Pos::None => f.write_str("N/A"),
|
||||||
Pos::Gen(g) => write!(f, "{g}"),
|
Pos::Gen(g) => write!(f, "{g}"),
|
||||||
Pos::SrcRange(sr) => write!(f, "{sr}"),
|
Pos::SrcRange(sr) => write!(f, "{sr}"),
|
||||||
Pos::Multi(posv) => {
|
|
||||||
write!(f, "{}", posv[0])?;
|
|
||||||
for pos in posv {
|
|
||||||
write!(f, "+{}", pos)?;
|
|
||||||
}
|
}
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl Add for Pos {
|
|
||||||
type Output = Pos;
|
|
||||||
fn add(self, rhs: Self) -> Self::Output {
|
|
||||||
match (self, rhs) {
|
|
||||||
(Pos::Multi(l), Pos::Multi(r)) => Pos::Multi(l.into_iter().chain(r).collect()),
|
|
||||||
(Pos::None, any) => any,
|
|
||||||
(any, Pos::None) => any,
|
|
||||||
(Pos::Multi(v), single) => Pos::Multi(v.into_iter().chain([single]).collect()),
|
|
||||||
(single, Pos::Multi(v)) => Pos::Multi([single].into_iter().chain(v).collect()),
|
|
||||||
(l, r) => Pos::Multi(vec![l, r]),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl AddAssign for Pos {
|
|
||||||
fn add_assign(&mut self, rhs: Self) {
|
|
||||||
let mut tmp = Pos::None;
|
|
||||||
std::mem::swap(&mut tmp, self);
|
|
||||||
*self = tmp + rhs;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -108,7 +77,7 @@ impl SrcRange {
|
|||||||
}
|
}
|
||||||
/// Create a dud [SourceRange] for testing. Its value is unspecified and
|
/// Create a dud [SourceRange] for testing. Its value is unspecified and
|
||||||
/// volatile.
|
/// volatile.
|
||||||
pub async fn mock(i: &Interner) -> Self { Self { range: 0..1, path: sym!(test; i) } }
|
pub async fn mock(i: &Interner) -> Self { Self { range: 0..1, path: sym!(test; i).await } }
|
||||||
/// Path the source text was loaded from
|
/// Path the source text was loaded from
|
||||||
pub fn path(&self) -> Sym { self.path.clone() }
|
pub fn path(&self) -> Sym { self.path.clone() }
|
||||||
/// Byte range
|
/// Byte range
|
||||||
@@ -162,24 +131,21 @@ pub struct CodeGenInfo {
|
|||||||
/// formatted like a Rust namespace
|
/// formatted like a Rust namespace
|
||||||
pub generator: Sym,
|
pub generator: Sym,
|
||||||
/// Unformatted user message with relevant circumstances and parameters
|
/// Unformatted user message with relevant circumstances and parameters
|
||||||
pub details: Tok<String>,
|
pub details: IStr,
|
||||||
}
|
}
|
||||||
impl CodeGenInfo {
|
impl CodeGenInfo {
|
||||||
/// A codegen marker with no user message and parameters
|
/// A codegen marker with no user message and parameters
|
||||||
pub async fn new_short(generator: Sym, i: &Interner) -> Self {
|
pub async fn new_short(generator: Sym, i: &Interner) -> Self {
|
||||||
Self { generator, details: i.i("").await }
|
Self { generator, details: i.is("").await }
|
||||||
}
|
}
|
||||||
/// A codegen marker with a user message or parameters
|
/// A codegen marker with a user message or parameters
|
||||||
pub async fn new_details(generator: Sym, details: impl AsRef<str>, i: &Interner) -> Self {
|
pub async fn new_details(generator: Sym, details: impl AsRef<str>, i: &Interner) -> Self {
|
||||||
Self { generator, details: i.i(details.as_ref()).await }
|
Self { generator, details: i.is(details.as_ref()).await }
|
||||||
}
|
}
|
||||||
/// Syntactic location
|
/// Syntactic location
|
||||||
pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) }
|
pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) }
|
||||||
pub async fn from_api(api: &api::CodeGenInfo, i: &Interner) -> Self {
|
pub async fn from_api(api: &api::CodeGenInfo, i: &Interner) -> Self {
|
||||||
Self {
|
Self { generator: Sym::from_api(api.generator, i).await, details: i.es(api.details).await }
|
||||||
generator: Sym::from_api(api.generator, i).await,
|
|
||||||
details: Tok::from_api(api.details, i).await,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
pub fn to_api(&self) -> api::CodeGenInfo {
|
pub fn to_api(&self) -> api::CodeGenInfo {
|
||||||
api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() }
|
api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() }
|
||||||
|
|||||||
@@ -12,52 +12,48 @@ use itertools::Itertools;
|
|||||||
use trait_set::trait_set;
|
use trait_set::trait_set;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::interner::{InternMarker, Interner, Tok};
|
use crate::interner::{IStr, IVec, Interner};
|
||||||
|
|
||||||
trait_set! {
|
trait_set! {
|
||||||
/// Traits that all name iterators should implement
|
/// Traits that all name iterators should implement
|
||||||
pub trait NameIter = Iterator<Item = Tok<String>> + DoubleEndedIterator + ExactSizeIterator;
|
pub trait NameIter = Iterator<Item = IStr> + DoubleEndedIterator + ExactSizeIterator;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A token path which may be empty. [VName] is the non-empty version
|
/// A token path which may be empty. [VName] is the non-empty version
|
||||||
#[derive(Clone, Default, Hash, PartialEq, Eq)]
|
#[derive(Clone, Default, Hash, PartialEq, Eq)]
|
||||||
pub struct VPath(Vec<Tok<String>>);
|
pub struct VPath(Vec<IStr>);
|
||||||
impl VPath {
|
impl VPath {
|
||||||
/// Collect segments into a vector
|
/// Collect segments into a vector
|
||||||
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Self {
|
pub fn new(items: impl IntoIterator<Item = IStr>) -> Self { Self(items.into_iter().collect()) }
|
||||||
Self(items.into_iter().collect())
|
|
||||||
}
|
|
||||||
/// Number of path segments
|
/// Number of path segments
|
||||||
pub fn len(&self) -> usize { self.0.len() }
|
pub fn len(&self) -> usize { self.0.len() }
|
||||||
/// Whether there are any path segments. In other words, whether this is a
|
/// Whether there are any path segments. In other words, whether this is a
|
||||||
/// valid name
|
/// valid name
|
||||||
pub fn is_empty(&self) -> bool { self.len() == 0 }
|
pub fn is_empty(&self) -> bool { self.len() == 0 }
|
||||||
/// Prepend some tokens to the path
|
/// Prepend some tokens to the path
|
||||||
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
|
pub fn prefix(self, items: impl IntoIterator<Item = IStr>) -> Self {
|
||||||
Self(items.into_iter().chain(self.0).collect())
|
Self(items.into_iter().chain(self.0).collect())
|
||||||
}
|
}
|
||||||
/// Append some tokens to the path
|
/// Append some tokens to the path
|
||||||
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
|
pub fn suffix(self, items: impl IntoIterator<Item = IStr>) -> Self {
|
||||||
Self(self.0.into_iter().chain(items).collect())
|
Self(self.0.into_iter().chain(items).collect())
|
||||||
}
|
}
|
||||||
/// Partition the string by `::` namespace separators
|
/// Partition the string by `::` namespace separators
|
||||||
pub async fn parse(s: &str, i: &Interner) -> Self {
|
pub async fn parse(s: &str, i: &Interner) -> Self {
|
||||||
Self(if s.is_empty() { vec![] } else { join_all(s.split("::").map(|s| i.i(s))).await })
|
Self(if s.is_empty() { vec![] } else { join_all(s.split("::").map(|s| i.is(s))).await })
|
||||||
}
|
}
|
||||||
/// Walk over the segments
|
/// Walk over the segments
|
||||||
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> {
|
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> { self.0.iter().map(|s| s.as_ref()) }
|
||||||
Box::new(self.0.iter().map(|s| s.as_str()))
|
|
||||||
}
|
|
||||||
/// Try to convert into non-empty version
|
/// Try to convert into non-empty version
|
||||||
pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) }
|
pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) }
|
||||||
/// Add a token to the path. Since now we know that it can't be empty, turn it
|
/// Add a token to the path. Since now we know that it can't be empty, turn it
|
||||||
/// into a name.
|
/// into a name.
|
||||||
pub fn name_with_suffix(self, name: Tok<String>) -> VName {
|
pub fn name_with_suffix(self, name: IStr) -> VName {
|
||||||
VName(self.into_iter().chain([name]).collect())
|
VName(self.into_iter().chain([name]).collect())
|
||||||
}
|
}
|
||||||
/// Add a token to the beginning of the. Since now we know that it can't be
|
/// Add a token to the beginning of the. Since now we know that it can't be
|
||||||
/// empty, turn it into a name.
|
/// empty, turn it into a name.
|
||||||
pub fn name_with_prefix(self, name: Tok<String>) -> VName {
|
pub fn name_with_prefix(self, name: IStr) -> VName {
|
||||||
VName([name].into_iter().chain(self).collect())
|
VName([name].into_iter().chain(self).collect())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -65,7 +61,7 @@ impl VPath {
|
|||||||
pub async fn from_path(path: &Path, ext: &str, i: &Interner) -> Option<(Self, bool)> {
|
pub async fn from_path(path: &Path, ext: &str, i: &Interner) -> Option<(Self, bool)> {
|
||||||
async fn to_vpath(p: &Path, i: &Interner) -> Option<VPath> {
|
async fn to_vpath(p: &Path, i: &Interner) -> Option<VPath> {
|
||||||
let tok_opt_v =
|
let tok_opt_v =
|
||||||
join_all(p.iter().map(|c| OptionFuture::from(c.to_str().map(|s| i.i(s))))).await;
|
join_all(p.iter().map(|c| OptionFuture::from(c.to_str().map(|s| i.is(s))))).await;
|
||||||
tok_opt_v.into_iter().collect::<Option<_>>().map(VPath)
|
tok_opt_v.into_iter().collect::<Option<_>>().map(VPath)
|
||||||
}
|
}
|
||||||
match path.extension().map(|s| s.to_str()) {
|
match path.extension().map(|s| s.to_str()) {
|
||||||
@@ -83,30 +79,28 @@ impl fmt::Display for VPath {
|
|||||||
write!(f, "{}", self.str_iter().join("::"))
|
write!(f, "{}", self.str_iter().join("::"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl FromIterator<Tok<String>> for VPath {
|
impl FromIterator<IStr> for VPath {
|
||||||
fn from_iter<T: IntoIterator<Item = Tok<String>>>(iter: T) -> Self {
|
fn from_iter<T: IntoIterator<Item = IStr>>(iter: T) -> Self { Self(iter.into_iter().collect()) }
|
||||||
Self(iter.into_iter().collect())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
impl IntoIterator for VPath {
|
impl IntoIterator for VPath {
|
||||||
type Item = Tok<String>;
|
type Item = IStr;
|
||||||
type IntoIter = vec::IntoIter<Self::Item>;
|
type IntoIter = vec::IntoIter<Self::Item>;
|
||||||
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
|
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
|
||||||
}
|
}
|
||||||
impl Borrow<[Tok<String>]> for VPath {
|
impl Borrow<[IStr]> for VPath {
|
||||||
fn borrow(&self) -> &[Tok<String>] { &self.0[..] }
|
fn borrow(&self) -> &[IStr] { &self.0[..] }
|
||||||
}
|
}
|
||||||
impl Deref for VPath {
|
impl Deref for VPath {
|
||||||
type Target = [Tok<String>];
|
type Target = [IStr];
|
||||||
fn deref(&self) -> &Self::Target { self.borrow() }
|
fn deref(&self) -> &Self::Target { self.borrow() }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T> Index<T> for VPath
|
impl<T> Index<T> for VPath
|
||||||
where [Tok<String>]: Index<T>
|
where [IStr]: Index<T>
|
||||||
{
|
{
|
||||||
type Output = <[Tok<String>] as Index<T>>::Output;
|
type Output = <[IStr] as Index<T>>::Output;
|
||||||
|
|
||||||
fn index(&self, index: T) -> &Self::Output { &Borrow::<[Tok<String>]>::borrow(self)[index] }
|
fn index(&self, index: T) -> &Self::Output { &Borrow::<[IStr]>::borrow(self)[index] }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A mutable representation of a namespaced identifier of at least one segment.
|
/// A mutable representation of a namespaced identifier of at least one segment.
|
||||||
@@ -116,11 +110,11 @@ where [Tok<String>]: Index<T>
|
|||||||
/// See also [Sym] for the immutable representation, and [VPath] for possibly
|
/// See also [Sym] for the immutable representation, and [VPath] for possibly
|
||||||
/// empty values
|
/// empty values
|
||||||
#[derive(Clone, Hash, PartialEq, Eq)]
|
#[derive(Clone, Hash, PartialEq, Eq)]
|
||||||
pub struct VName(Vec<Tok<String>>);
|
pub struct VName(Vec<IStr>);
|
||||||
impl VName {
|
impl VName {
|
||||||
/// Assert that the sequence isn't empty and wrap it in [VName] to represent
|
/// Assert that the sequence isn't empty and wrap it in [VName] to represent
|
||||||
/// this invariant
|
/// this invariant
|
||||||
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> {
|
pub fn new(items: impl IntoIterator<Item = IStr>) -> Result<Self, EmptyNameError> {
|
||||||
let data: Vec<_> = items.into_iter().collect();
|
let data: Vec<_> = items.into_iter().collect();
|
||||||
if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) }
|
if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) }
|
||||||
}
|
}
|
||||||
@@ -128,27 +122,27 @@ impl VName {
|
|||||||
name: impl IntoIterator<Item = api::TStr>,
|
name: impl IntoIterator<Item = api::TStr>,
|
||||||
i: &Interner,
|
i: &Interner,
|
||||||
) -> Result<Self, EmptyNameError> {
|
) -> Result<Self, EmptyNameError> {
|
||||||
Self::new(join_all(name.into_iter().map(|m| Tok::from_api(m, i))).await)
|
Self::new(join_all(name.into_iter().map(|m| i.es(m))).await)
|
||||||
}
|
}
|
||||||
/// Unwrap the enclosed vector
|
/// Unwrap the enclosed vector
|
||||||
pub fn into_vec(self) -> Vec<Tok<String>> { self.0 }
|
pub fn into_vec(self) -> Vec<IStr> { self.0 }
|
||||||
/// Get a reference to the enclosed vector
|
/// Get a reference to the enclosed vector
|
||||||
pub fn vec(&self) -> &Vec<Tok<String>> { &self.0 }
|
pub fn vec(&self) -> &Vec<IStr> { &self.0 }
|
||||||
/// Mutable access to the underlying vector. To ensure correct results, this
|
/// Mutable access to the underlying vector. To ensure correct results, this
|
||||||
/// must never be empty.
|
/// must never be empty.
|
||||||
pub fn vec_mut(&mut self) -> &mut Vec<Tok<String>> { &mut self.0 }
|
pub fn vec_mut(&mut self) -> &mut Vec<IStr> { &mut self.0 }
|
||||||
/// Intern the name and return a [Sym]
|
/// Intern the name and return a [Sym]
|
||||||
pub async fn to_sym(&self, i: &Interner) -> Sym { Sym(i.i(&self.0[..]).await) }
|
pub async fn to_sym(&self, i: &Interner) -> Sym { Sym(i.iv(&self.0[..]).await) }
|
||||||
/// If this name has only one segment, return it
|
/// If this name has only one segment, return it
|
||||||
pub fn as_root(&self) -> Option<Tok<String>> { self.0.iter().exactly_one().ok().cloned() }
|
pub fn as_root(&self) -> Option<IStr> { self.0.iter().exactly_one().ok().cloned() }
|
||||||
/// Prepend the segments to this name
|
/// Prepend the segments to this name
|
||||||
#[must_use = "This is a pure function"]
|
#[must_use = "This is a pure function"]
|
||||||
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
|
pub fn prefix(self, items: impl IntoIterator<Item = IStr>) -> Self {
|
||||||
Self(items.into_iter().chain(self.0).collect())
|
Self(items.into_iter().chain(self.0).collect())
|
||||||
}
|
}
|
||||||
/// Append the segments to this name
|
/// Append the segments to this name
|
||||||
#[must_use = "This is a pure function"]
|
#[must_use = "This is a pure function"]
|
||||||
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self {
|
pub fn suffix(self, items: impl IntoIterator<Item = IStr>) -> Self {
|
||||||
Self(self.0.into_iter().chain(items).collect())
|
Self(self.0.into_iter().chain(items).collect())
|
||||||
}
|
}
|
||||||
/// Read a `::` separated namespaced name
|
/// Read a `::` separated namespaced name
|
||||||
@@ -159,7 +153,7 @@ impl VName {
|
|||||||
Self::parse(s, i).await.expect("empty literal !?")
|
Self::parse(s, i).await.expect("empty literal !?")
|
||||||
}
|
}
|
||||||
/// Obtain an iterator over the segments of the name
|
/// Obtain an iterator over the segments of the name
|
||||||
pub fn iter(&self) -> impl Iterator<Item = Tok<String>> + '_ { self.0.iter().cloned() }
|
pub fn iter(&self) -> impl Iterator<Item = IStr> + '_ { self.0.iter().cloned() }
|
||||||
}
|
}
|
||||||
impl fmt::Debug for VName {
|
impl fmt::Debug for VName {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
|
||||||
@@ -170,22 +164,22 @@ impl fmt::Display for VName {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl IntoIterator for VName {
|
impl IntoIterator for VName {
|
||||||
type Item = Tok<String>;
|
type Item = IStr;
|
||||||
type IntoIter = vec::IntoIter<Self::Item>;
|
type IntoIter = vec::IntoIter<Self::Item>;
|
||||||
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
|
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
|
||||||
}
|
}
|
||||||
impl<T> Index<T> for VName
|
impl<T> Index<T> for VName
|
||||||
where [Tok<String>]: Index<T>
|
where [IStr]: Index<T>
|
||||||
{
|
{
|
||||||
type Output = <[Tok<String>] as Index<T>>::Output;
|
type Output = <[IStr] as Index<T>>::Output;
|
||||||
|
|
||||||
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
|
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
|
||||||
}
|
}
|
||||||
impl Borrow<[Tok<String>]> for VName {
|
impl Borrow<[IStr]> for VName {
|
||||||
fn borrow(&self) -> &[Tok<String>] { self.0.borrow() }
|
fn borrow(&self) -> &[IStr] { self.0.borrow() }
|
||||||
}
|
}
|
||||||
impl Deref for VName {
|
impl Deref for VName {
|
||||||
type Target = [Tok<String>];
|
type Target = [IStr];
|
||||||
fn deref(&self) -> &Self::Target { self.borrow() }
|
fn deref(&self) -> &Self::Target { self.borrow() }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -193,11 +187,9 @@ impl Deref for VName {
|
|||||||
/// empty sequence
|
/// empty sequence
|
||||||
#[derive(Debug, Copy, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
|
#[derive(Debug, Copy, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
|
||||||
pub struct EmptyNameError;
|
pub struct EmptyNameError;
|
||||||
impl TryFrom<&[Tok<String>]> for VName {
|
impl TryFrom<&[IStr]> for VName {
|
||||||
type Error = EmptyNameError;
|
type Error = EmptyNameError;
|
||||||
fn try_from(value: &[Tok<String>]) -> Result<Self, Self::Error> {
|
fn try_from(value: &[IStr]) -> Result<Self, Self::Error> { Self::new(value.iter().cloned()) }
|
||||||
Self::new(value.iter().cloned())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// An interned representation of a namespaced identifier.
|
/// An interned representation of a namespaced identifier.
|
||||||
@@ -206,36 +198,36 @@ impl TryFrom<&[Tok<String>]> for VName {
|
|||||||
///
|
///
|
||||||
/// See also [VName]
|
/// See also [VName]
|
||||||
#[derive(Clone, Hash, PartialEq, Eq)]
|
#[derive(Clone, Hash, PartialEq, Eq)]
|
||||||
pub struct Sym(Tok<Vec<Tok<String>>>);
|
pub struct Sym(IVec);
|
||||||
impl Sym {
|
impl Sym {
|
||||||
/// Assert that the sequence isn't empty, intern it and wrap it in a [Sym] to
|
/// Assert that the sequence isn't empty, intern it and wrap it in a [Sym] to
|
||||||
/// represent this invariant
|
/// represent this invariant
|
||||||
pub async fn new(
|
pub async fn new(
|
||||||
v: impl IntoIterator<Item = Tok<String>>,
|
v: impl IntoIterator<Item = IStr>,
|
||||||
i: &Interner,
|
i: &Interner,
|
||||||
) -> Result<Self, EmptyNameError> {
|
) -> Result<Self, EmptyNameError> {
|
||||||
let items = v.into_iter().collect_vec();
|
let items = v.into_iter().collect_vec();
|
||||||
Self::from_tok(i.i(&items).await)
|
Self::from_tok(i.iv(&items).await)
|
||||||
}
|
}
|
||||||
/// Read a `::` separated namespaced name.
|
/// Read a `::` separated namespaced name.
|
||||||
pub async fn parse(s: &str, i: &Interner) -> Result<Self, EmptyNameError> {
|
pub async fn parse(s: &str, i: &Interner) -> Result<Self, EmptyNameError> {
|
||||||
Ok(Sym(i.i(&VName::parse(s, i).await?.into_vec()).await))
|
Ok(Sym(i.iv(&VName::parse(s, i).await?.into_vec()).await))
|
||||||
}
|
}
|
||||||
/// Assert that a token isn't empty, and wrap it in a [Sym]
|
/// Assert that a token isn't empty, and wrap it in a [Sym]
|
||||||
pub fn from_tok(t: Tok<Vec<Tok<String>>>) -> Result<Self, EmptyNameError> {
|
pub fn from_tok(t: IVec) -> Result<Self, EmptyNameError> {
|
||||||
if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) }
|
if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) }
|
||||||
}
|
}
|
||||||
/// Grab the interner token
|
/// Grab the interner token
|
||||||
pub fn tok(&self) -> Tok<Vec<Tok<String>>> { self.0.clone() }
|
pub fn tok(&self) -> IVec { self.0.clone() }
|
||||||
/// Get a number unique to this name suitable for arbitrary ordering.
|
/// Get a number unique to this name suitable for arbitrary ordering.
|
||||||
pub fn id(&self) -> NonZeroU64 { self.0.to_api().get_id() }
|
pub fn id(&self) -> NonZeroU64 { self.0.to_api().0 }
|
||||||
/// Extern the sym for editing
|
/// Extern the sym for editing
|
||||||
pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) }
|
pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) }
|
||||||
pub async fn from_api(marker: api::TStrv, i: &Interner) -> Sym {
|
pub async fn from_api(marker: api::TVec, i: &Interner) -> Sym {
|
||||||
Self::from_tok(Tok::from_api(marker, i).await).expect("Empty sequence found for serialized Sym")
|
Self::from_tok(i.ev(marker).await).expect("Empty sequence found for serialized Sym")
|
||||||
}
|
}
|
||||||
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
|
pub fn to_api(&self) -> api::TVec { self.tok().to_api() }
|
||||||
pub async fn suffix(&self, tokv: impl IntoIterator<Item = Tok<String>>, i: &Interner) -> Sym {
|
pub async fn suffix(&self, tokv: impl IntoIterator<Item = IStr>, i: &Interner) -> Sym {
|
||||||
Self::new(self.0.iter().cloned().chain(tokv), i).await.unwrap()
|
Self::new(self.0.iter().cloned().chain(tokv), i).await.unwrap()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -248,17 +240,17 @@ impl fmt::Display for Sym {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<T> Index<T> for Sym
|
impl<T> Index<T> for Sym
|
||||||
where [Tok<String>]: Index<T>
|
where [IStr]: Index<T>
|
||||||
{
|
{
|
||||||
type Output = <[Tok<String>] as Index<T>>::Output;
|
type Output = <[IStr] as Index<T>>::Output;
|
||||||
|
|
||||||
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
|
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
|
||||||
}
|
}
|
||||||
impl Borrow<[Tok<String>]> for Sym {
|
impl Borrow<[IStr]> for Sym {
|
||||||
fn borrow(&self) -> &[Tok<String>] { &self.0[..] }
|
fn borrow(&self) -> &[IStr] { &self.0[..] }
|
||||||
}
|
}
|
||||||
impl Deref for Sym {
|
impl Deref for Sym {
|
||||||
type Target = [Tok<String>];
|
type Target = [IStr];
|
||||||
fn deref(&self) -> &Self::Target { self.borrow() }
|
fn deref(&self) -> &Self::Target { self.borrow() }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -266,15 +258,15 @@ impl Deref for Sym {
|
|||||||
/// handled together in datastructures. The names can never be empty
|
/// handled together in datastructures. The names can never be empty
|
||||||
#[allow(clippy::len_without_is_empty)] // never empty
|
#[allow(clippy::len_without_is_empty)] // never empty
|
||||||
pub trait NameLike:
|
pub trait NameLike:
|
||||||
'static + Clone + Eq + Hash + fmt::Debug + fmt::Display + Borrow<[Tok<String>]>
|
'static + Clone + Eq + Hash + fmt::Debug + fmt::Display + Borrow<[IStr]>
|
||||||
{
|
{
|
||||||
/// Convert into held slice
|
/// Convert into held slice
|
||||||
fn as_slice(&self) -> &[Tok<String>] { Borrow::<[Tok<String>]>::borrow(self) }
|
fn as_slice(&self) -> &[IStr] { Borrow::<[IStr]>::borrow(self) }
|
||||||
/// Get iterator over tokens
|
/// Get iterator over tokens
|
||||||
fn segs(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
|
fn segs(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
|
||||||
/// Get iterator over string segments
|
/// Get iterator over string segments
|
||||||
fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ {
|
fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ {
|
||||||
self.as_slice().iter().map(|t| t.as_str())
|
self.as_slice().iter().map(|t| t.as_ref())
|
||||||
}
|
}
|
||||||
/// Fully resolve the name for printing
|
/// Fully resolve the name for printing
|
||||||
#[must_use]
|
#[must_use]
|
||||||
@@ -286,19 +278,19 @@ pub trait NameLike:
|
|||||||
NonZeroUsize::try_from(self.segs().count()).expect("NameLike never empty")
|
NonZeroUsize::try_from(self.segs().count()).expect("NameLike never empty")
|
||||||
}
|
}
|
||||||
/// Like slice's `split_first` except we know that it always returns Some
|
/// Like slice's `split_first` except we know that it always returns Some
|
||||||
fn split_first_seg(&self) -> (Tok<String>, &[Tok<String>]) {
|
fn split_first_seg(&self) -> (IStr, &[IStr]) {
|
||||||
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
|
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
|
||||||
(foot.clone(), torso)
|
(foot.clone(), torso)
|
||||||
}
|
}
|
||||||
/// Like slice's `split_last` except we know that it always returns Some
|
/// Like slice's `split_last` except we know that it always returns Some
|
||||||
fn split_last_seg(&self) -> (Tok<String>, &[Tok<String>]) {
|
fn split_last_seg(&self) -> (IStr, &[IStr]) {
|
||||||
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
|
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
|
||||||
(foot.clone(), torso)
|
(foot.clone(), torso)
|
||||||
}
|
}
|
||||||
/// Get the first element
|
/// Get the first element
|
||||||
fn first_seg(&self) -> Tok<String> { self.split_first_seg().0 }
|
fn first_seg(&self) -> IStr { self.split_first_seg().0 }
|
||||||
/// Get the last element
|
/// Get the last element
|
||||||
fn last_seg(&self) -> Tok<String> { self.split_last_seg().0 }
|
fn last_seg(&self) -> IStr { self.split_last_seg().0 }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl NameLike for Sym {}
|
impl NameLike for Sym {}
|
||||||
@@ -311,15 +303,17 @@ impl NameLike for VName {}
|
|||||||
/// cloning the token.
|
/// cloning the token.
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! sym {
|
macro_rules! sym {
|
||||||
($seg1:tt $( :: $seg:tt)* ; $i:expr) => {
|
($seg1:tt $( :: $seg:tt)* ; $i:expr) => { async {
|
||||||
$crate::name::Sym::from_tok(
|
$crate::name::Sym::from_tok(
|
||||||
$i.i(&[
|
$i.iv(&[
|
||||||
$i.i(stringify!($seg1)).await
|
$i.is(stringify!($seg1)).await
|
||||||
$( , $i.i(stringify!($seg)).await )*
|
$( , $i.is(stringify!($seg)).await )*
|
||||||
])
|
])
|
||||||
.await
|
.await
|
||||||
).unwrap()
|
).unwrap()
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
(@NAME $seg:tt) => {}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a [VName] literal.
|
/// Create a [VName] literal.
|
||||||
@@ -327,12 +321,12 @@ macro_rules! sym {
|
|||||||
/// The components are interned much like in [sym].
|
/// The components are interned much like in [sym].
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! vname {
|
macro_rules! vname {
|
||||||
($seg1:tt $( :: $seg:tt)* ; $i:expr) => {
|
($seg1:tt $( :: $seg:tt)* ; $i:expr) => { async {
|
||||||
$crate::name::VName::new([
|
$crate::name::VName::new([
|
||||||
$i.i(stringify!($seg1)).await
|
$i.is(stringify!($seg1)).await
|
||||||
$( , $i.i(stringify!($seg)).await )*
|
$( , $i.is(stringify!($seg)).await )*
|
||||||
]).unwrap()
|
]).unwrap()
|
||||||
};
|
} };
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a [VPath] literal.
|
/// Create a [VPath] literal.
|
||||||
@@ -340,12 +334,12 @@ macro_rules! vname {
|
|||||||
/// The components are interned much like in [sym].
|
/// The components are interned much like in [sym].
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! vpath {
|
macro_rules! vpath {
|
||||||
($seg1:tt $( :: $seg:tt)+ ; $i:expr) => {
|
($seg1:tt $( :: $seg:tt)+ ; $i:expr) => { async {
|
||||||
$crate::name::VPath(vec![
|
$crate::name::VPath(vec![
|
||||||
$i.i(stringify!($seg1)).await
|
$i.is(stringify!($seg1)).await
|
||||||
$( , $i.i(stringify!($seg)).await )+
|
$( , $i.is(stringify!($seg)).await )+
|
||||||
])
|
])
|
||||||
};
|
} };
|
||||||
() => {
|
() => {
|
||||||
$crate::name::VPath(vec![])
|
$crate::name::VPath(vec![])
|
||||||
}
|
}
|
||||||
@@ -358,35 +352,37 @@ mod test {
|
|||||||
use test_executors::spin_on;
|
use test_executors::spin_on;
|
||||||
|
|
||||||
use super::{NameLike, Sym, VName};
|
use super::{NameLike, Sym, VName};
|
||||||
use crate::interner::{Interner, Tok};
|
use crate::interner::test::DummyInterner;
|
||||||
|
use crate::interner::{IStr, Interner};
|
||||||
use crate::name::VPath;
|
use crate::name::VPath;
|
||||||
|
use crate::testing::AsyncMonitor;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn recur() {
|
fn recur() {
|
||||||
spin_on(async {
|
spin_on(async {
|
||||||
let i = Interner::new_master();
|
let i = DummyInterner::new(AsyncMonitor::default());
|
||||||
let myname = vname!(foo::bar; i);
|
let myname = vname!(foo::bar; i).await;
|
||||||
let _borrowed_slice: &[Tok<String>] = myname.borrow();
|
let _borrowed_slice: &[IStr] = myname.borrow();
|
||||||
let _deref_pathslice: &[Tok<String>] = &myname;
|
let _deref_pathslice: &[IStr] = &myname;
|
||||||
let _as_slice_out: &[Tok<String>] = myname.as_slice();
|
let _as_slice_out: &[IStr] = myname.as_slice();
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn literals() {
|
fn literals() {
|
||||||
spin_on(async {
|
spin_on(async {
|
||||||
let i = Interner::new_master();
|
let i = DummyInterner::new(AsyncMonitor::default());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
sym!(foo::bar::baz; i),
|
sym!(foo::bar::baz; i).await,
|
||||||
Sym::new([i.i("foo").await, i.i("bar").await, i.i("baz").await], &i).await.unwrap()
|
Sym::new([i.is("foo").await, i.is("bar").await, i.is("baz").await], &i).await.unwrap()
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
vname!(foo::bar::baz; i),
|
vname!(foo::bar::baz; i).await,
|
||||||
VName::new([i.i("foo").await, i.i("bar").await, i.i("baz").await]).unwrap()
|
VName::new([i.is("foo").await, i.is("bar").await, i.is("baz").await]).unwrap()
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
vpath!(foo::bar::baz; i),
|
vpath!(foo::bar::baz; i).await,
|
||||||
VPath::new([i.i("foo").await, i.i("bar").await, i.i("baz").await])
|
VPath::new([i.is("foo").await, i.is("bar").await, i.is("baz").await])
|
||||||
);
|
);
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,8 +3,8 @@ use std::ops::Range;
|
|||||||
|
|
||||||
use ordered_float::NotNan;
|
use ordered_float::NotNan;
|
||||||
|
|
||||||
use crate::error::{OrcErrv, mk_errv};
|
use crate::ctx::Ctx;
|
||||||
use crate::interner::Interner;
|
use crate::error::OrcErr;
|
||||||
use crate::location::SrcRange;
|
use crate::location::SrcRange;
|
||||||
use crate::name::Sym;
|
use crate::name::Sym;
|
||||||
|
|
||||||
@@ -55,14 +55,14 @@ pub struct NumError {
|
|||||||
pub kind: NumErrorKind,
|
pub kind: NumErrorKind,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn num_to_errv(
|
pub fn num_to_errv(
|
||||||
NumError { kind, range }: NumError,
|
NumError { kind, range }: NumError,
|
||||||
offset: u32,
|
offset: u32,
|
||||||
source: &Sym,
|
source: &Sym,
|
||||||
i: &Interner,
|
ctx: &Ctx,
|
||||||
) -> OrcErrv {
|
) -> OrcErr {
|
||||||
mk_errv(
|
ctx.mk_err(
|
||||||
i.i("Failed to parse number").await,
|
"Failed to parse number",
|
||||||
match kind {
|
match kind {
|
||||||
NumErrorKind::NaN => "NaN emerged during parsing",
|
NumErrorKind::NaN => "NaN emerged during parsing",
|
||||||
NumErrorKind::InvalidDigit => "non-digit character encountered",
|
NumErrorKind::InvalidDigit => "non-digit character encountered",
|
||||||
|
|||||||
@@ -1,34 +1,21 @@
|
|||||||
use std::fmt::{self, Display};
|
use std::fmt::{self, Display};
|
||||||
use std::iter;
|
use std::iter;
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
|
use std::rc::Rc;
|
||||||
|
|
||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use futures::future::join_all;
|
use futures::future::join_all;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::error::{OrcErrv, OrcRes, Reporter, mk_errv};
|
use crate::ctx::Ctx;
|
||||||
|
use crate::error::{OrcErr, OrcRes, Reporter};
|
||||||
use crate::format::{FmtCtx, FmtUnit, Format, fmt};
|
use crate::format::{FmtCtx, FmtUnit, Format, fmt};
|
||||||
use crate::interner::{Interner, Tok};
|
use crate::interner::{IStr, Interner};
|
||||||
use crate::location::SrcRange;
|
use crate::location::SrcRange;
|
||||||
use crate::name::{Sym, VName, VPath};
|
use crate::name::{Sym, VName, VPath};
|
||||||
use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_fmt, ttv_range};
|
use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_fmt, ttv_range};
|
||||||
|
|
||||||
pub trait ParseCtx {
|
|
||||||
#[must_use]
|
|
||||||
fn i(&self) -> &Interner;
|
|
||||||
#[must_use]
|
|
||||||
fn rep(&self) -> &Reporter;
|
|
||||||
}
|
|
||||||
pub struct ParseCtxImpl<'a> {
|
|
||||||
pub i: &'a Interner,
|
|
||||||
pub r: &'a Reporter,
|
|
||||||
}
|
|
||||||
impl ParseCtx for ParseCtxImpl<'_> {
|
|
||||||
fn i(&self) -> &Interner { self.i }
|
|
||||||
fn rep(&self) -> &Reporter { self.r }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' }
|
pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' }
|
||||||
pub fn name_char(c: char) -> bool { name_start(c) || c.is_numeric() }
|
pub fn name_char(c: char) -> bool { name_start(c) || c.is_numeric() }
|
||||||
pub fn op_char(c: char) -> bool { !name_char(c) && !c.is_whitespace() && !"()[]{}\\".contains(c) }
|
pub fn op_char(c: char) -> bool { !name_char(c) && !c.is_whitespace() && !"()[]{}\\".contains(c) }
|
||||||
@@ -103,22 +90,22 @@ impl<A: ExprRepr, X: ExtraTok> Format for Snippet<'_, A, X> {
|
|||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct Comment {
|
pub struct Comment {
|
||||||
pub text: Tok<String>,
|
pub text: IStr,
|
||||||
pub sr: SrcRange,
|
pub sr: SrcRange,
|
||||||
}
|
}
|
||||||
impl Comment {
|
impl Comment {
|
||||||
// XXX: which of these four are actually used?
|
// XXX: which of these four are actually used?
|
||||||
pub async fn from_api(c: &api::Comment, src: Sym, i: &Interner) -> Self {
|
pub async fn from_api(c: &api::Comment, src: Sym, cx: &Ctx) -> Self {
|
||||||
Self { text: i.ex(c.text).await, sr: SrcRange::new(c.range.clone(), &src) }
|
Self { text: cx.i().es(c.text).await, sr: SrcRange::new(c.range.clone(), &src) }
|
||||||
}
|
}
|
||||||
pub async fn from_tk(tk: &TokTree<impl ExprRepr, impl ExtraTok>, i: &Interner) -> Option<Self> {
|
pub async fn from_tk(tk: &TokTree<impl ExprRepr, impl ExtraTok>, cx: &Ctx) -> Option<Self> {
|
||||||
match &tk.tok {
|
match &tk.tok {
|
||||||
Token::Comment(text) => Some(Self { text: i.i(&**text).await, sr: tk.sr.clone() }),
|
Token::Comment(text) => Some(Self { text: cx.i().is(&**text).await, sr: tk.sr.clone() }),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn to_tk<R: ExprRepr, X: ExtraTok>(&self) -> TokTree<R, X> {
|
pub fn to_tk<A: ExprRepr, X: ExtraTok>(&self) -> TokTree<A, X> {
|
||||||
TokTree { tok: Token::Comment(self.text.rc().clone()), sr: self.sr.clone() }
|
TokTree { tok: Token::Comment(Rc::new(self.text.to_string())), sr: self.sr.clone() }
|
||||||
}
|
}
|
||||||
pub fn to_api(&self) -> api::Comment {
|
pub fn to_api(&self) -> api::Comment {
|
||||||
api::Comment { range: self.sr.range(), text: self.text.to_api() }
|
api::Comment { range: self.sr.range(), text: self.text.to_api() }
|
||||||
@@ -130,7 +117,7 @@ impl fmt::Display for Comment {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn line_items<'a, A: ExprRepr, X: ExtraTok>(
|
pub async fn line_items<'a, A: ExprRepr, X: ExtraTok>(
|
||||||
ctx: &impl ParseCtx,
|
ctx: &Ctx,
|
||||||
snip: Snippet<'a, A, X>,
|
snip: Snippet<'a, A, X>,
|
||||||
) -> Vec<Parsed<'a, Vec<Comment>, A, X>> {
|
) -> Vec<Parsed<'a, Vec<Comment>, A, X>> {
|
||||||
let mut items = Vec::new();
|
let mut items = Vec::new();
|
||||||
@@ -146,7 +133,7 @@ pub async fn line_items<'a, A: ExprRepr, X: ExtraTok>(
|
|||||||
Some(i) => {
|
Some(i) => {
|
||||||
let (cmts, tail) = line.split_at(i);
|
let (cmts, tail) = line.split_at(i);
|
||||||
let comments = join_all(comments.drain(..).chain(cmts.cur).map(|t| async {
|
let comments = join_all(comments.drain(..).chain(cmts.cur).map(|t| async {
|
||||||
Comment::from_tk(t, ctx.i()).await.expect("All are comments checked above")
|
Comment::from_tk(t, ctx).await.expect("All are comments checked above")
|
||||||
}))
|
}))
|
||||||
.await;
|
.await;
|
||||||
items.push(Parsed { output: comments, tail });
|
items.push(Parsed { output: comments, tail });
|
||||||
@@ -157,56 +144,50 @@ pub async fn line_items<'a, A: ExprRepr, X: ExtraTok>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn try_pop_no_fluff<'a, A: ExprRepr, X: ExtraTok>(
|
pub async fn try_pop_no_fluff<'a, A: ExprRepr, X: ExtraTok>(
|
||||||
ctx: &impl ParseCtx,
|
ctx: &Ctx,
|
||||||
snip: Snippet<'a, A, X>,
|
snip: Snippet<'a, A, X>,
|
||||||
) -> ParseRes<'a, &'a TokTree<A, X>, A, X> {
|
) -> ParseRes<'a, &'a TokTree<A, X>, A, X> {
|
||||||
match snip.skip_fluff().pop_front() {
|
match snip.skip_fluff().pop_front() {
|
||||||
Some((output, tail)) => Ok(Parsed { output, tail }),
|
Some((output, tail)) => Ok(Parsed { output, tail }),
|
||||||
None => Err(mk_errv(
|
None => Err(
|
||||||
ctx.i().i("Unexpected end").await,
|
ctx.mk_err("Unexpected end", "Line ends abruptly; more tokens were expected", [snip.sr()]),
|
||||||
"Line ends abruptly; more tokens were expected",
|
),
|
||||||
[snip.sr()],
|
|
||||||
)),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn expect_end(
|
pub async fn expect_end(ctx: &Ctx, snip: Snippet<'_, impl ExprRepr, impl ExtraTok>) -> OrcRes<()> {
|
||||||
ctx: &impl ParseCtx,
|
|
||||||
snip: Snippet<'_, impl ExprRepr, impl ExtraTok>,
|
|
||||||
) -> OrcRes<()> {
|
|
||||||
match snip.skip_fluff().get(0) {
|
match snip.skip_fluff().get(0) {
|
||||||
Some(surplus) => Err(mk_errv(
|
Some(surplus) =>
|
||||||
ctx.i().i("Extra code after end of line").await,
|
Err(ctx.mk_err("Extra code after end of line", "Code found after the end of the line", [
|
||||||
"Code found after the end of the line",
|
surplus.sr.pos(),
|
||||||
[surplus.sr.pos()],
|
])),
|
||||||
)),
|
|
||||||
None => Ok(()),
|
None => Ok(()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn expect_tok<'a, A: ExprRepr, X: ExtraTok>(
|
pub async fn expect_tok<'a, A: ExprRepr, X: ExtraTok>(
|
||||||
ctx: &impl ParseCtx,
|
ctx: &Ctx,
|
||||||
snip: Snippet<'a, A, X>,
|
snip: Snippet<'a, A, X>,
|
||||||
tok: Tok<String>,
|
tok: IStr,
|
||||||
) -> ParseRes<'a, (), A, X> {
|
) -> ParseRes<'a, (), A, X> {
|
||||||
let Parsed { output: head, tail } = try_pop_no_fluff(ctx, snip).await?;
|
let Parsed { output: head, tail } = try_pop_no_fluff(ctx, snip).await?;
|
||||||
match &head.tok {
|
match &head.tok {
|
||||||
Token::Name(n) if *n == tok => Ok(Parsed { output: (), tail }),
|
Token::Name(n) if *n == tok => Ok(Parsed { output: (), tail }),
|
||||||
t => Err(mk_errv(
|
t => Err(ctx.mk_err(
|
||||||
ctx.i().i("Expected specific keyword").await,
|
"Expected specific keyword",
|
||||||
format!("Expected {tok} but found {:?}", fmt(t, ctx.i()).await),
|
format!("Expected {tok} but found {:?}", fmt(t, &ctx.i()).await),
|
||||||
[head.sr()],
|
[head.sr()],
|
||||||
)),
|
)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn token_errv<A: ExprRepr, X: ExtraTok>(
|
pub async fn token_errv<A: ExprRepr, X: ExtraTok>(
|
||||||
ctx: &impl ParseCtx,
|
ctx: &Ctx,
|
||||||
tok: &TokTree<A, X>,
|
tok: &TokTree<A, X>,
|
||||||
description: &'static str,
|
description: &'static str,
|
||||||
message: impl FnOnce(&str) -> String,
|
message: impl FnOnce(&str) -> String,
|
||||||
) -> OrcErrv {
|
) -> OrcErr {
|
||||||
mk_errv(ctx.i().i(description).await, message(&fmt(tok, ctx.i()).await), [tok.sr.pos()])
|
ctx.mk_err(description, message(&fmt(tok, &ctx.i()).await), [tok.sr.pos()])
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> {
|
pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> {
|
||||||
@@ -217,12 +198,12 @@ pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> {
|
|||||||
pub type ParseRes<'a, T, H, X> = OrcRes<Parsed<'a, T, H, X>>;
|
pub type ParseRes<'a, T, H, X> = OrcRes<Parsed<'a, T, H, X>>;
|
||||||
|
|
||||||
pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
|
pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
|
||||||
ctx: &impl ParseCtx,
|
ctx: &Ctx,
|
||||||
tail: Snippet<'a, A, X>,
|
tail: Snippet<'a, A, X>,
|
||||||
) -> ParseRes<'a, Vec<Import>, A, X> {
|
) -> ParseRes<'a, Vec<Import>, A, X> {
|
||||||
let Some((tt, tail)) = tail.skip_fluff().pop_front() else {
|
let Some((tt, tail)) = tail.skip_fluff().pop_front() else {
|
||||||
return Err(mk_errv(
|
return Err(ctx.mk_err(
|
||||||
ctx.i().i("Expected token").await,
|
"Expected token",
|
||||||
"Expected a name, a parenthesized list of names, or a globstar.",
|
"Expected a name, a parenthesized list of names, or a globstar.",
|
||||||
[tail.sr().pos()],
|
[tail.sr().pos()],
|
||||||
));
|
));
|
||||||
@@ -231,17 +212,14 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
|
|||||||
#[allow(clippy::type_complexity)] // it's an internal function
|
#[allow(clippy::type_complexity)] // it's an internal function
|
||||||
pub async fn rec<A: ExprRepr, X: ExtraTok>(
|
pub async fn rec<A: ExprRepr, X: ExtraTok>(
|
||||||
tt: &TokTree<A, X>,
|
tt: &TokTree<A, X>,
|
||||||
ctx: &impl ParseCtx,
|
ctx: &Ctx,
|
||||||
) -> OrcRes<Vec<(Vec<Tok<String>>, Option<Tok<String>>, SrcRange)>> {
|
) -> OrcRes<Vec<(Vec<IStr>, Option<IStr>, SrcRange)>> {
|
||||||
let ttpos = tt.sr.pos();
|
let ttpos = tt.sr.pos();
|
||||||
match &tt.tok {
|
match &tt.tok {
|
||||||
Token::NS(ns, body) => {
|
Token::NS(ns, body) => {
|
||||||
if !ns.starts_with(name_start) {
|
if !ns.starts_with(name_start) {
|
||||||
ctx.rep().report(mk_errv(
|
let err = ctx.mk_err("Unexpected name prefix", "Only names can precede ::", [ttpos]);
|
||||||
ctx.i().i("Unexpected name prefix").await,
|
ctx.rep().report(err)
|
||||||
"Only names can precede ::",
|
|
||||||
[ttpos],
|
|
||||||
))
|
|
||||||
};
|
};
|
||||||
let out = Box::pin(rec(body, ctx)).await?;
|
let out = Box::pin(rec(body, ctx)).await?;
|
||||||
Ok(out.into_iter().update(|i| i.0.push(ns.clone())).collect_vec())
|
Ok(out.into_iter().update(|i| i.0.push(ns.clone())).collect_vec())
|
||||||
@@ -264,9 +242,9 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
|
|||||||
Ok(o)
|
Ok(o)
|
||||||
},
|
},
|
||||||
t => {
|
t => {
|
||||||
return Err(mk_errv(
|
return Err(ctx.mk_err(
|
||||||
ctx.i().i("Unrecognized name end").await,
|
"Unrecognized name end",
|
||||||
format!("Names cannot end with {:?} tokens", fmt(t, ctx.i()).await),
|
format!("Names cannot end with {:?} tokens", fmt(t, &ctx.i()).await),
|
||||||
[ttpos],
|
[ttpos],
|
||||||
));
|
));
|
||||||
},
|
},
|
||||||
@@ -285,7 +263,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Import {
|
pub struct Import {
|
||||||
pub path: VPath,
|
pub path: VPath,
|
||||||
pub name: Option<Tok<String>>,
|
pub name: Option<IStr>,
|
||||||
pub sr: SrcRange,
|
pub sr: SrcRange,
|
||||||
}
|
}
|
||||||
impl Import {
|
impl Import {
|
||||||
@@ -296,14 +274,14 @@ impl Import {
|
|||||||
None => self.path.into_name().expect("Import cannot be empty"),
|
None => self.path.into_name().expect("Import cannot be empty"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn new(sr: SrcRange, path: VPath, name: Tok<String>) -> Self {
|
pub fn new(sr: SrcRange, path: VPath, name: IStr) -> Self {
|
||||||
Import { path, name: Some(name), sr }
|
Import { path, name: Some(name), sr }
|
||||||
}
|
}
|
||||||
pub fn new_glob(sr: SrcRange, path: VPath) -> Self { Import { path, name: None, sr } }
|
pub fn new_glob(sr: SrcRange, path: VPath) -> Self { Import { path, name: None, sr } }
|
||||||
}
|
}
|
||||||
impl Display for Import {
|
impl Display for Import {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
write!(f, "{}::{}", self.path.iter().join("::"), self.name.as_ref().map_or("*", |t| t.as_str()))
|
write!(f, "{}::{}", self.path, self.name.as_deref().unwrap_or("*"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,18 +1,22 @@
|
|||||||
|
use std::any::Any;
|
||||||
use std::cell::RefCell;
|
use std::cell::RefCell;
|
||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
use std::mem;
|
use std::mem;
|
||||||
use std::ops::{BitAnd, Deref};
|
use std::ops::{BitAnd, Deref};
|
||||||
use std::pin::Pin;
|
use std::pin::Pin;
|
||||||
|
use std::rc::Rc;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::sync::atomic::{AtomicBool, Ordering};
|
use std::sync::atomic::{AtomicBool, Ordering};
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
use derive_destructure::destructure;
|
use derive_destructure::destructure;
|
||||||
use dyn_clone::{DynClone, clone_box};
|
use dyn_clone::{DynClone, clone_box};
|
||||||
use futures::channel::mpsc;
|
use futures::channel::mpsc::{self, Sender};
|
||||||
|
use futures::channel::oneshot;
|
||||||
use futures::future::LocalBoxFuture;
|
use futures::future::LocalBoxFuture;
|
||||||
use futures::lock::Mutex;
|
use futures::lock::Mutex;
|
||||||
use futures::{SinkExt, StreamExt};
|
use futures::{AsyncBufRead, AsyncWrite, SinkExt, Stream, StreamExt};
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use orchid_api_traits::{Channel, Coding, Decode, Encode, MsgSet, Request};
|
use orchid_api_traits::{Channel, Coding, Decode, Encode, MsgSet, Request};
|
||||||
use trait_set::trait_set;
|
use trait_set::trait_set;
|
||||||
@@ -22,6 +26,71 @@ use crate::logging::Logger;
|
|||||||
|
|
||||||
pub struct Receipt<'a>(PhantomData<&'a mut ()>);
|
pub struct Receipt<'a>(PhantomData<&'a mut ()>);
|
||||||
|
|
||||||
|
/// This object holds an exclusive lock on the outbound pipe.
|
||||||
|
pub trait DynRequestWriter {
|
||||||
|
fn writer(&mut self) -> Pin<&mut dyn AsyncWrite>;
|
||||||
|
/// Release the outbound pipe and wait for the response to begin.
|
||||||
|
fn get_response(self: Box<Self>) -> Pin<Box<dyn Future<Output = Box<dyn DynResponseHandle>>>>;
|
||||||
|
}
|
||||||
|
/// This object holds an exclusive lock on the inbound pipe.
|
||||||
|
pub trait DynResponseHandle {
|
||||||
|
fn reader(&mut self) -> Pin<&mut dyn AsyncBufRead>;
|
||||||
|
fn finish(self: Box<Self>) -> Pin<Box<dyn Future<Output = ()>>>;
|
||||||
|
}
|
||||||
|
/// This object holds an exclusive lock on the outbound pipe.
|
||||||
|
pub trait DynNotifWriter {
|
||||||
|
fn writer(&mut self) -> Pin<&mut dyn AsyncWrite>;
|
||||||
|
fn finish(self: Box<Self>) -> Pin<Box<dyn Future<Output = ()>>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait DynClient {
|
||||||
|
fn request(&self) -> Pin<Box<dyn Future<Output = Box<dyn DynRequestWriter>>>>;
|
||||||
|
fn notif(&self) -> Pin<Box<dyn Future<Output = Box<dyn DynNotifWriter>>>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Client<T: MsgSet>(pub(crate) Rc<dyn DynClient>, pub(crate) PhantomData<T>);
|
||||||
|
impl<T: MsgSet> Client<T> {
|
||||||
|
pub async fn notify<Notif: Into<<T::Out as Channel>::Notif>>(&self, notif: Notif) {
|
||||||
|
let mut notif_writer = self.0.notif().await;
|
||||||
|
notif.into().encode(notif_writer.writer()).await;
|
||||||
|
notif_writer.finish().await;
|
||||||
|
}
|
||||||
|
pub async fn request<Req: Request + Into<<T::Out as Channel>::Req>>(
|
||||||
|
&self,
|
||||||
|
req: Req,
|
||||||
|
) -> Req::Response {
|
||||||
|
let root_req = req.into();
|
||||||
|
let mut req_writer = self.0.request().await;
|
||||||
|
root_req.encode(req_writer.writer()).await;
|
||||||
|
let mut req_hand = req_writer.get_response().await;
|
||||||
|
let res = Req::Response::decode(req_hand.reader()).await;
|
||||||
|
req_hand.finish().await;
|
||||||
|
res
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct DuplexServerState {
|
||||||
|
pending_outbound: HashMap<u64, Box<dyn FnOnce(&mut dyn AsyncBufRead)>>,
|
||||||
|
sender: Pin<Box<dyn AsyncWrite>>,
|
||||||
|
receiver: Pin<Box<dyn AsyncBufRead>>,
|
||||||
|
}
|
||||||
|
pub enum ServerEvent<T: MsgSet> {
|
||||||
|
Notif(<T::In as Channel>::Notif),
|
||||||
|
Req(RequestHandle<T>, <T::In as Channel>::Req),
|
||||||
|
}
|
||||||
|
pub async fn run_duplex_server<T: MsgSet>(
|
||||||
|
sender: Pin<Box<dyn AsyncWrite>>,
|
||||||
|
receiver: Pin<Box<dyn AsyncBufRead>>,
|
||||||
|
) -> (impl Stream<Item = ServerEvent<T>>, Client<T>) {
|
||||||
|
let sender = Rc::new(Mutex::new(sender));
|
||||||
|
let receiver = Rc::new(Mutex::new(receiver));
|
||||||
|
let pending_outbound = Rc::new(Mutex::new(HashMap::new()));
|
||||||
|
}
|
||||||
|
pub struct DuplexServer(Rc<Mutex<DuplexServerState>>);
|
||||||
|
impl DuplexServer {
|
||||||
|
pub fn receive(msg: )
|
||||||
|
}
|
||||||
|
|
||||||
trait_set! {
|
trait_set! {
|
||||||
pub trait SendFn<T: MsgSet> =
|
pub trait SendFn<T: MsgSet> =
|
||||||
for<'a> FnMut(&'a [u8], ReqNot<T>) -> LocalBoxFuture<'a, ()>
|
for<'a> FnMut(&'a [u8], ReqNot<T>) -> LocalBoxFuture<'a, ()>
|
||||||
@@ -40,27 +109,32 @@ fn get_id(message: &[u8]) -> (u64, &[u8]) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub trait ReqHandlish {
|
pub trait ReqHandlish {
|
||||||
fn defer(&self, cb: impl Future<Output = ()> + 'static)
|
fn defer_drop(&self, val: impl Any + 'static)
|
||||||
where Self: Sized {
|
where Self: Sized {
|
||||||
self.defer_objsafe(Box::pin(cb));
|
self.defer_drop_objsafe(Box::new(val));
|
||||||
}
|
}
|
||||||
fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>);
|
fn defer_drop_objsafe(&self, val: Box<dyn Any>);
|
||||||
}
|
}
|
||||||
impl ReqHandlish for &'_ dyn ReqHandlish {
|
impl ReqHandlish for &'_ dyn ReqHandlish {
|
||||||
fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>) { (**self).defer_objsafe(val) }
|
fn defer_drop_objsafe(&self, val: Box<dyn Any>) { (**self).defer_drop_objsafe(val) }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(destructure)]
|
#[derive(destructure)]
|
||||||
pub struct RequestHandle<'a, MS: MsgSet> {
|
pub struct RequestHandle<MS: MsgSet> {
|
||||||
defer: RefCell<Vec<Pin<Box<dyn Future<Output = ()>>>>>,
|
defer_drop: RefCell<Vec<Box<dyn Any>>>,
|
||||||
fulfilled: AtomicBool,
|
fulfilled: AtomicBool,
|
||||||
id: u64,
|
id: u64,
|
||||||
_reqlt: PhantomData<&'a mut ()>,
|
|
||||||
parent: ReqNot<MS>,
|
parent: ReqNot<MS>,
|
||||||
}
|
}
|
||||||
impl<'a, MS: MsgSet + 'static> RequestHandle<'a, MS> {
|
impl<'a, MS: MsgSet + 'static> RequestHandle<'a, MS> {
|
||||||
fn new(parent: ReqNot<MS>, id: u64) -> Self {
|
fn new(parent: ReqNot<MS>, id: u64) -> Self {
|
||||||
Self { defer: RefCell::default(), fulfilled: false.into(), _reqlt: PhantomData, parent, id }
|
Self {
|
||||||
|
defer_drop: RefCell::default(),
|
||||||
|
fulfilled: false.into(),
|
||||||
|
_reqlt: PhantomData,
|
||||||
|
parent,
|
||||||
|
id,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() }
|
pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() }
|
||||||
pub async fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt<'a> {
|
pub async fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt<'a> {
|
||||||
@@ -76,19 +150,13 @@ impl<'a, MS: MsgSet + 'static> RequestHandle<'a, MS> {
|
|||||||
response.encode(Pin::new(&mut buf)).await;
|
response.encode(Pin::new(&mut buf)).await;
|
||||||
let mut send = clone_box(&*self.reqnot().0.lock().await.send);
|
let mut send = clone_box(&*self.reqnot().0.lock().await.send);
|
||||||
(send)(&buf, self.parent.clone()).await;
|
(send)(&buf, self.parent.clone()).await;
|
||||||
let deferred = mem::take(&mut *self.defer.borrow_mut());
|
|
||||||
for item in deferred {
|
|
||||||
item.await
|
|
||||||
}
|
|
||||||
Receipt(PhantomData)
|
Receipt(PhantomData)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<MS: MsgSet> ReqHandlish for RequestHandle<'_, MS> {
|
impl<MS: MsgSet> ReqHandlish for RequestHandle<'_, MS> {
|
||||||
fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>) {
|
fn defer_drop_objsafe(&self, val: Box<dyn Any>) { self.defer_drop.borrow_mut().push(val); }
|
||||||
self.defer.borrow_mut().push(val)
|
|
||||||
}
|
}
|
||||||
}
|
impl<MS: MsgSet> Drop for RequestHandle<MS> {
|
||||||
impl<MS: MsgSet> Drop for RequestHandle<'_, MS> {
|
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
let done = self.fulfilled.load(Ordering::Relaxed);
|
let done = self.fulfilled.load(Ordering::Relaxed);
|
||||||
debug_assert!(done, "Request {} dropped without response", self.id)
|
debug_assert!(done, "Request {} dropped without response", self.id)
|
||||||
@@ -122,7 +190,7 @@ impl<T: MsgSet> ReqNot<T> {
|
|||||||
notif: impl NotifFn<T>,
|
notif: impl NotifFn<T>,
|
||||||
req: impl ReqFn<T>,
|
req: impl ReqFn<T>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self(
|
let this = Self(
|
||||||
Arc::new(Mutex::new(ReqNotData {
|
Arc::new(Mutex::new(ReqNotData {
|
||||||
id: 1,
|
id: 1,
|
||||||
send: Box::new(send),
|
send: Box::new(send),
|
||||||
@@ -131,7 +199,13 @@ impl<T: MsgSet> ReqNot<T> {
|
|||||||
responses: HashMap::new(),
|
responses: HashMap::new(),
|
||||||
})),
|
})),
|
||||||
logger,
|
logger,
|
||||||
)
|
);
|
||||||
|
let (sig_send, sig_recv) = std::sync::mpsc::sync_channel(0);
|
||||||
|
std::thread::spawn(move || {
|
||||||
|
std::thread::sleep(Duration::from_secs(10));
|
||||||
|
sig_send.send(()).expect("Crash!");
|
||||||
|
});
|
||||||
|
this
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Can be called from a polling thread or dispatched in any other way
|
/// Can be called from a polling thread or dispatched in any other way
|
||||||
@@ -145,7 +219,7 @@ impl<T: MsgSet> ReqNot<T> {
|
|||||||
notif_cb(notif_val, self.clone()).await
|
notif_cb(notif_val, self.clone()).await
|
||||||
} else if 0 < id.bitand(1 << 63) {
|
} else if 0 < id.bitand(1 << 63) {
|
||||||
let mut sender = g.responses.remove(&!id).expect("Received response for invalid message");
|
let mut sender = g.responses.remove(&!id).expect("Received response for invalid message");
|
||||||
let _ = sender.send(message.to_vec()).await;
|
sender.send(message.to_vec()).await.unwrap()
|
||||||
} else {
|
} else {
|
||||||
let message = <T::In as Channel>::Req::decode(Pin::new(&mut &payload[..])).await;
|
let message = <T::In as Channel>::Req::decode(Pin::new(&mut &payload[..])).await;
|
||||||
let mut req_cb = clone_box(&*g.req);
|
let mut req_cb = clone_box(&*g.req);
|
||||||
@@ -235,10 +309,7 @@ impl<This: DynRequester + ?Sized> Requester for This {
|
|||||||
async fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response {
|
async fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response {
|
||||||
let req = format!("{data:?}");
|
let req = format!("{data:?}");
|
||||||
let rep = R::Response::decode(Pin::new(&mut &self.raw_request(data.into()).await[..])).await;
|
let rep = R::Response::decode(Pin::new(&mut &self.raw_request(data.into()).await[..])).await;
|
||||||
let req_str = req.to_string();
|
|
||||||
if !req_str.starts_with("AtomPrint") && !req_str.starts_with("ExtAtomPrint") {
|
|
||||||
writeln!(self.logger(), "Request {req} got response {rep:?}");
|
writeln!(self.logger(), "Request {req} got response {rep:?}");
|
||||||
}
|
|
||||||
rep
|
rep
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
23
orchid-base/src/testing.rs
Normal file
23
orchid-base/src/testing.rs
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
#![cfg(any(feature = "mocks", test))]
|
||||||
|
|
||||||
|
use std::future::ready;
|
||||||
|
use std::pin::Pin;
|
||||||
|
use std::rc::Rc;
|
||||||
|
|
||||||
|
pub struct AsyncMonitor<E: 'static>(Rc<dyn Fn(E) -> Pin<Box<dyn Future<Output = ()>>>>);
|
||||||
|
impl<E: 'static> AsyncMonitor<E> {
|
||||||
|
pub fn new<F: AsyncFn(E) -> () + 'static>(f: F) -> Self {
|
||||||
|
let f_rc = Rc::new(f);
|
||||||
|
AsyncMonitor(Rc::new(move |e| {
|
||||||
|
let f_rc = f_rc.clone();
|
||||||
|
Box::pin(async move { f_rc(e).await })
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
pub async fn notify(&self, e: E) -> () { (self.0)(e).await }
|
||||||
|
}
|
||||||
|
impl<E: 'static> Default for AsyncMonitor<E> {
|
||||||
|
fn default() -> Self { Self(Rc::new(|_| Box::pin(ready(())))) }
|
||||||
|
}
|
||||||
|
impl<E: 'static> Clone for AsyncMonitor<E> {
|
||||||
|
fn clone(&self) -> Self { Self(self.0.clone()) }
|
||||||
|
}
|
||||||
@@ -12,9 +12,9 @@ use never::Never;
|
|||||||
use orchid_api_traits::Coding;
|
use orchid_api_traits::Coding;
|
||||||
use trait_set::trait_set;
|
use trait_set::trait_set;
|
||||||
|
|
||||||
use crate::error::OrcErrv;
|
use crate::error::OwnedOrcErr;
|
||||||
use crate::format::{FmtCtx, FmtUnit, Format, Variants};
|
use crate::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||||
use crate::interner::{Interner, Tok};
|
use crate::interner::{IStr, Interner};
|
||||||
use crate::location::{Pos, SrcRange};
|
use crate::location::{Pos, SrcRange};
|
||||||
use crate::name::{Sym, VName, VPath};
|
use crate::name::{Sym, VName, VPath};
|
||||||
use crate::parse::Snippet;
|
use crate::parse::Snippet;
|
||||||
@@ -113,11 +113,11 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
|
|||||||
let pos = SrcRange::new(tt.range.clone(), src);
|
let pos = SrcRange::new(tt.range.clone(), src);
|
||||||
let tok = match_mapping!(&tt.token, api::Token => Token::<H, X> {
|
let tok = match_mapping!(&tt.token, api::Token => Token::<H, X> {
|
||||||
BR,
|
BR,
|
||||||
NS(n => Tok::from_api(*n, i).await,
|
NS(n => i.es(*n).await,
|
||||||
b => Box::new(Self::from_api(b, hctx, xctx, src, i).boxed_local().await)),
|
b => Box::new(Self::from_api(b, hctx, xctx, src, i).boxed_local().await)),
|
||||||
Bottom(e => OrcErrv::from_api(e, i).await),
|
Bottom(e => OwnedOrcErr::from_api(e, i).await),
|
||||||
LambdaHead(arg => Box::new(Self::from_api(arg, hctx, xctx, src, i).boxed_local().await)),
|
LambdaHead(arg => Box::new(Self::from_api(arg, hctx, xctx, src, i).boxed_local().await)),
|
||||||
Name(n => Tok::from_api(*n, i).await),
|
Name(n => i.es(*n).await),
|
||||||
S(*par, b => ttv_from_api(b, hctx, xctx, src, i).await),
|
S(*par, b => ttv_from_api(b, hctx, xctx, src, i).await),
|
||||||
Comment(c.clone()),
|
Comment(c.clone()),
|
||||||
NewExpr(expr => X::from_api(expr, xctx, pos.clone(), i).await),
|
NewExpr(expr => X::from_api(expr, xctx, pos.clone(), i).await),
|
||||||
@@ -145,8 +145,8 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
|
|||||||
api::TokenTree { range: self.sr.range.clone(), token }
|
api::TokenTree { range: self.sr.range.clone(), token }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_kw(&self, tk: Tok<String>) -> bool { self.tok.is_kw(tk) }
|
pub fn is_kw(&self, tk: IStr) -> bool { self.tok.is_kw(tk) }
|
||||||
pub fn as_name(&self) -> Option<Tok<String>> {
|
pub fn as_name(&self) -> Option<IStr> {
|
||||||
if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None }
|
if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None }
|
||||||
}
|
}
|
||||||
pub fn as_multiname(&self) -> Result<VName, &TokTree<H, X>> {
|
pub fn as_multiname(&self) -> Result<VName, &TokTree<H, X>> {
|
||||||
@@ -245,9 +245,9 @@ pub enum Token<H: ExprRepr, X: ExtraTok> {
|
|||||||
/// stretches to the end of the enclosing parens or the end of the const line
|
/// stretches to the end of the enclosing parens or the end of the const line
|
||||||
LambdaHead(Box<TokTree<H, X>>),
|
LambdaHead(Box<TokTree<H, X>>),
|
||||||
/// A binding, operator, or a segment of a namespaced::name
|
/// A binding, operator, or a segment of a namespaced::name
|
||||||
Name(Tok<String>),
|
Name(IStr),
|
||||||
/// A namespace prefix, like `my_ns::` followed by a token
|
/// A namespace prefix, like `my_ns::` followed by a token
|
||||||
NS(Tok<String>, Box<TokTree<H, X>>),
|
NS(IStr, Box<TokTree<H, X>>),
|
||||||
/// A line break
|
/// A line break
|
||||||
BR,
|
BR,
|
||||||
/// `()`, `[]`, or `{}`
|
/// `()`, `[]`, or `{}`
|
||||||
@@ -259,11 +259,11 @@ pub enum Token<H: ExprRepr, X: ExtraTok> {
|
|||||||
/// A grammar error emitted by a lexer plugin if it was possible to continue
|
/// A grammar error emitted by a lexer plugin if it was possible to continue
|
||||||
/// reading. Parsers should treat it as an atom unless it prevents parsing,
|
/// reading. Parsers should treat it as an atom unless it prevents parsing,
|
||||||
/// in which case both this and a relevant error should be returned.
|
/// in which case both this and a relevant error should be returned.
|
||||||
Bottom(OrcErrv),
|
Bottom(OwnedOrcErr),
|
||||||
}
|
}
|
||||||
impl<H: ExprRepr, X: ExtraTok> Token<H, X> {
|
impl<H: ExprRepr, X: ExtraTok> Token<H, X> {
|
||||||
pub fn at(self, sr: SrcRange) -> TokTree<H, X> { TokTree { sr, tok: self } }
|
pub fn at(self, sr: SrcRange) -> TokTree<H, X> { TokTree { sr, tok: self } }
|
||||||
pub fn is_kw(&self, tk: Tok<String>) -> bool { matches!(self, Token::Name(n) if *n == tk) }
|
pub fn is_kw(&self, tk: IStr) -> bool { matches!(self, Token::Name(n) if *n == tk) }
|
||||||
pub fn as_s(&self, par: Paren) -> Option<&[TokTree<H, X>]> {
|
pub fn as_s(&self, par: Paren) -> Option<&[TokTree<H, X>]> {
|
||||||
match self {
|
match self {
|
||||||
Self::S(p, b) if *p == par => Some(b),
|
Self::S(p, b) if *p == par => Some(b),
|
||||||
@@ -275,8 +275,7 @@ impl<H: ExprRepr, X: ExtraTok> Format for Token<H, X> {
|
|||||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
match self {
|
match self {
|
||||||
Self::BR => "\n".to_string().into(),
|
Self::BR => "\n".to_string().into(),
|
||||||
Self::Bottom(err) if err.len() == 1 => format!("Bottom({}) ", err.one().unwrap()).into(),
|
Self::Bottom(err) => format!("Botttom({}) ", indent(&err.to_string())).into(),
|
||||||
Self::Bottom(err) => format!("Botttom(\n{}) ", indent(&err.to_string())).into(),
|
|
||||||
Self::Comment(c) => format!("--[{c}]--").into(),
|
Self::Comment(c) => format!("--[{c}]--").into(),
|
||||||
Self::LambdaHead(arg) =>
|
Self::LambdaHead(arg) =>
|
||||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("\\{0b}.")))
|
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("\\{0b}.")))
|
||||||
@@ -307,7 +306,7 @@ pub async fn ttv_fmt<'a: 'b, 'b>(
|
|||||||
ttv: impl IntoIterator<Item = &'b TokTree<impl ExprRepr + 'a, impl ExtraTok + 'a>>,
|
ttv: impl IntoIterator<Item = &'b TokTree<impl ExprRepr + 'a, impl ExtraTok + 'a>>,
|
||||||
c: &(impl FmtCtx + ?Sized),
|
c: &(impl FmtCtx + ?Sized),
|
||||||
) -> FmtUnit {
|
) -> FmtUnit {
|
||||||
FmtUnit::sequence("", " ", "", None, join_all(ttv.into_iter().map(|t| t.print(c))).await)
|
FmtUnit::sequence(" ", None, join_all(ttv.into_iter().map(|t| t.print(c))).await)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn indent(s: &str) -> String { s.replace("\n", "\n ") }
|
pub fn indent(s: &str) -> String { s.replace("\n", "\n ") }
|
||||||
|
|||||||
@@ -13,13 +13,13 @@ pub enum Loaded {
|
|||||||
Code(Arc<String>),
|
Code(Arc<String>),
|
||||||
/// Conceptually equivalent to the list of *.orc files in a folder, without
|
/// Conceptually equivalent to the list of *.orc files in a folder, without
|
||||||
/// the extension
|
/// the extension
|
||||||
Collection(Arc<Vec<Tok<String>>>),
|
Collection(Arc<Vec<IStr>>),
|
||||||
}
|
}
|
||||||
impl Loaded {
|
impl Loaded {
|
||||||
/// Is the loaded item source code (not a collection)?
|
/// Is the loaded item source code (not a collection)?
|
||||||
pub fn is_code(&self) -> bool { matches!(self, Loaded::Code(_)) }
|
pub fn is_code(&self) -> bool { matches!(self, Loaded::Code(_)) }
|
||||||
/// Collect the elements in a collection rreport
|
/// Collect the elements in a collection rreport
|
||||||
pub fn collection(items: impl IntoIterator<Item = Tok<String>>) -> Self {
|
pub fn collection(items: impl IntoIterator<Item = IStr>) -> Self {
|
||||||
Self::Collection(Arc::new(items.into_iter().collect()))
|
Self::Collection(Arc::new(items.into_iter().collect()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -55,7 +55,7 @@ impl ErrorSansOrigin for CodeNotFound {
|
|||||||
/// formats and other sources for libraries and dependencies.
|
/// formats and other sources for libraries and dependencies.
|
||||||
pub trait VirtFS {
|
pub trait VirtFS {
|
||||||
/// Implementation of [VirtFS::read]
|
/// Implementation of [VirtFS::read]
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult;
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult;
|
||||||
/// Discover information about a path without reading it.
|
/// Discover information about a path without reading it.
|
||||||
///
|
///
|
||||||
/// Implement this if your vfs backend can do expensive operations
|
/// Implement this if your vfs backend can do expensive operations
|
||||||
@@ -68,7 +68,7 @@ pub trait VirtFS {
|
|||||||
}
|
}
|
||||||
/// Convert a path into a human-readable string that is meaningful in the
|
/// Convert a path into a human-readable string that is meaningful in the
|
||||||
/// target context.
|
/// target context.
|
||||||
fn display(&self, path: &[Tok<String>]) -> Option<String>;
|
fn display(&self, path: &[IStr]) -> Option<String>;
|
||||||
/// Convert the FS handler into a type-erased version of itself for packing in
|
/// Convert the FS handler into a type-erased version of itself for packing in
|
||||||
/// a tree.
|
/// a tree.
|
||||||
fn rc(self) -> Rc<dyn VirtFS>
|
fn rc(self) -> Rc<dyn VirtFS>
|
||||||
@@ -81,15 +81,15 @@ pub trait VirtFS {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl VirtFS for &dyn VirtFS {
|
impl VirtFS for &dyn VirtFS {
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult {
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
|
||||||
(*self).get(path, full_path)
|
(*self).get(path, full_path)
|
||||||
}
|
}
|
||||||
fn display(&self, path: &[Tok<String>]) -> Option<String> { (*self).display(path) }
|
fn display(&self, path: &[IStr]) -> Option<String> { (*self).display(path) }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: VirtFS + ?Sized> VirtFS for Rc<T> {
|
impl<T: VirtFS + ?Sized> VirtFS for Rc<T> {
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult {
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
|
||||||
(**self).get(path, full_path)
|
(**self).get(path, full_path)
|
||||||
}
|
}
|
||||||
fn display(&self, path: &[Tok<String>]) -> Option<String> { (**self).display(path) }
|
fn display(&self, path: &[IStr]) -> Option<String> { (**self).display(path) }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ impl<'a> Combine for &'a dyn VirtFS {
|
|||||||
pub type DeclTree = ModEntry<Rc<dyn VirtFS>, (), ()>;
|
pub type DeclTree = ModEntry<Rc<dyn VirtFS>, (), ()>;
|
||||||
|
|
||||||
impl VirtFS for DeclTree {
|
impl VirtFS for DeclTree {
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult {
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
|
||||||
match &self.member {
|
match &self.member {
|
||||||
ModMember::Item(it) => it.get(path, full_path),
|
ModMember::Item(it) => it.get(path, full_path),
|
||||||
ModMember::Sub(module) => match path.split_first() {
|
ModMember::Sub(module) => match path.split_first() {
|
||||||
@@ -44,7 +44,7 @@ impl VirtFS for DeclTree {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn display(&self, path: &[Tok<String>]) -> Option<String> {
|
fn display(&self, path: &[IStr]) -> Option<String> {
|
||||||
let (head, tail) = path.split_first()?;
|
let (head, tail) = path.split_first()?;
|
||||||
match &self.member {
|
match &self.member {
|
||||||
ModMember::Item(it) => it.display(path),
|
ModMember::Item(it) => it.display(path),
|
||||||
@@ -54,16 +54,16 @@ impl VirtFS for DeclTree {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl VirtFS for String {
|
impl VirtFS for String {
|
||||||
fn display(&self, _: &[Tok<String>]) -> Option<String> { None }
|
fn display(&self, _: &[IStr]) -> Option<String> { None }
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult {
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
|
||||||
(path.is_empty().then(|| Loaded::Code(Arc::new(self.as_str().to_string()))))
|
(path.is_empty().then(|| Loaded::Code(Arc::new(self.as_str().to_string()))))
|
||||||
.ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())
|
.ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> VirtFS for &'a str {
|
impl<'a> VirtFS for &'a str {
|
||||||
fn display(&self, _: &[Tok<String>]) -> Option<String> { None }
|
fn display(&self, _: &[IStr]) -> Option<String> { None }
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult {
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
|
||||||
(path.is_empty().then(|| Loaded::Code(Arc::new(self.to_string()))))
|
(path.is_empty().then(|| Loaded::Code(Arc::new(self.to_string()))))
|
||||||
.ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())
|
.ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -99,14 +99,14 @@ impl DirNode {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn mk_pathbuf(&self, path: &[Tok<String>]) -> PathBuf {
|
fn mk_pathbuf(&self, path: &[IStr]) -> PathBuf {
|
||||||
let mut fpath = self.root.clone();
|
let mut fpath = self.root.clone();
|
||||||
path.iter().for_each(|seg| fpath.push(seg.as_str()));
|
path.iter().for_each(|seg| fpath.push(seg.as_str()));
|
||||||
fpath
|
fpath
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl VirtFS for DirNode {
|
impl VirtFS for DirNode {
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult {
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
|
||||||
let fpath = self.mk_pathbuf(path);
|
let fpath = self.mk_pathbuf(path);
|
||||||
let mut binding = self.cached.borrow_mut();
|
let mut binding = self.cached.borrow_mut();
|
||||||
let (_, res) = (binding.raw_entry_mut().from_key(&fpath))
|
let (_, res) = (binding.raw_entry_mut().from_key(&fpath))
|
||||||
@@ -114,7 +114,7 @@ impl VirtFS for DirNode {
|
|||||||
res.clone()
|
res.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn display(&self, path: &[Tok<String>]) -> Option<String> {
|
fn display(&self, path: &[IStr]) -> Option<String> {
|
||||||
let pathbuf = self.mk_pathbuf(path).with_extension(self.ext());
|
let pathbuf = self.mk_pathbuf(path).with_extension(self.ext());
|
||||||
Some(pathbuf.to_string_lossy().to_string())
|
Some(pathbuf.to_string_lossy().to_string())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ impl EmbeddedFS {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl VirtFS for EmbeddedFS {
|
impl VirtFS for EmbeddedFS {
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult {
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
|
||||||
if path.is_empty() {
|
if path.is_empty() {
|
||||||
return Ok(Loaded::collection(self.tree.keys(|_| true)));
|
return Ok(Loaded::collection(self.tree.keys(|_| true)));
|
||||||
}
|
}
|
||||||
@@ -67,7 +67,7 @@ impl VirtFS for EmbeddedFS {
|
|||||||
ModMember::Sub(sub) => Loaded::collection(sub.keys(|_| true)),
|
ModMember::Sub(sub) => Loaded::collection(sub.keys(|_| true)),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn display(&self, path: &[Tok<String>]) -> Option<String> {
|
fn display(&self, path: &[IStr]) -> Option<String> {
|
||||||
let Self { gen, suffix, .. } = self;
|
let Self { gen, suffix, .. } = self;
|
||||||
Some(format!("{}{suffix} in {gen}", path.iter().join("/")))
|
Some(format!("{}{suffix} in {gen}", path.iter().join("/")))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,18 +21,18 @@ impl<'a> PrefixFS<'a> {
|
|||||||
add: VPath::parse(add.as_ref()),
|
add: VPath::parse(add.as_ref()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn proc_path(&self, path: &[Tok<String>]) -> Option<Vec<Tok<String>>> {
|
fn proc_path(&self, path: &[IStr]) -> Option<Vec<IStr>> {
|
||||||
let path = path.strip_prefix(self.remove.as_slice())?;
|
let path = path.strip_prefix(self.remove.as_slice())?;
|
||||||
Some(self.add.0.iter().chain(path).cloned().collect_vec())
|
Some(self.add.0.iter().chain(path).cloned().collect_vec())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<'a> VirtFS for PrefixFS<'a> {
|
impl<'a> VirtFS for PrefixFS<'a> {
|
||||||
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> super::FSResult {
|
fn get(&self, path: &[IStr], full_path: &PathSlice) -> super::FSResult {
|
||||||
let path =
|
let path =
|
||||||
self.proc_path(path).ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())?;
|
self.proc_path(path).ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())?;
|
||||||
self.wrapped.get(&path, full_path)
|
self.wrapped.get(&path, full_path)
|
||||||
}
|
}
|
||||||
fn display(&self, path: &[Tok<String>]) -> Option<String> {
|
fn display(&self, path: &[IStr]) -> Option<String> {
|
||||||
self.wrapped.display(&self.proc_path(path)?)
|
self.wrapped.display(&self.proc_path(path)?)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ edition = "2024"
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
|
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
|
||||||
|
async-lock = "3.4.1"
|
||||||
async-once-cell = "0.5.4"
|
async-once-cell = "0.5.4"
|
||||||
derive_destructure = "1.0.0"
|
derive_destructure = "1.0.0"
|
||||||
dyn-clone = "1.0.20"
|
dyn-clone = "1.0.20"
|
||||||
@@ -14,11 +15,10 @@ futures = { version = "0.3.31", features = [
|
|||||||
"std",
|
"std",
|
||||||
"async-await",
|
"async-await",
|
||||||
], default-features = false }
|
], default-features = false }
|
||||||
futures-locks = "0.7.1"
|
|
||||||
hashbrown = "0.16.0"
|
hashbrown = "0.16.0"
|
||||||
include_dir = { version = "0.7.4", optional = true }
|
include_dir = { version = "0.7.4", optional = true }
|
||||||
itertools = "0.14.0"
|
itertools = "0.14.0"
|
||||||
konst = "0.4.2"
|
konst = "0.4.1"
|
||||||
lazy_static = "1.5.0"
|
lazy_static = "1.5.0"
|
||||||
memo-map = "0.3.3"
|
memo-map = "0.3.3"
|
||||||
never = "0.1.0"
|
never = "0.1.0"
|
||||||
@@ -31,7 +31,6 @@ ordered-float = "5.0.0"
|
|||||||
pastey = "0.1.1"
|
pastey = "0.1.1"
|
||||||
some_executor = "0.6.1"
|
some_executor = "0.6.1"
|
||||||
substack = "1.1.1"
|
substack = "1.1.1"
|
||||||
task-local = "0.1.0"
|
|
||||||
tokio = { version = "1.47.1", optional = true, features = [] }
|
tokio = { version = "1.47.1", optional = true, features = [] }
|
||||||
tokio-util = { version = "0.7.16", optional = true, features = ["compat"] }
|
tokio-util = { version = "0.7.16", optional = true, features = ["compat"] }
|
||||||
|
|
||||||
|
|||||||
@@ -12,20 +12,22 @@ use futures::future::LocalBoxFuture;
|
|||||||
use futures::{AsyncRead, AsyncWrite, FutureExt, StreamExt, stream};
|
use futures::{AsyncRead, AsyncWrite, FutureExt, StreamExt, stream};
|
||||||
use orchid_api_derive::Coding;
|
use orchid_api_derive::Coding;
|
||||||
use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec};
|
use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec};
|
||||||
use orchid_base::error::{OrcErrv, OrcRes, mk_errv, mk_errv_floating};
|
use orchid_base::clone;
|
||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, fmt};
|
use orchid_base::ctx::Ctx;
|
||||||
|
use orchid_base::error::{OrcErr, OrcRes};
|
||||||
|
use orchid_base::format::{FmtCtx, FmtUnit, Format};
|
||||||
|
use orchid_base::interner::Interner;
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::reqnot::Requester;
|
use orchid_base::reqnot::Requester;
|
||||||
use trait_set::trait_set;
|
use trait_set::trait_set;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::context::{ctx, i};
|
|
||||||
use crate::conv::ToExpr;
|
use crate::conv::ToExpr;
|
||||||
// use crate::error::{ProjectError, ProjectResult};
|
// use crate::error::{ProjectError, ProjectResult};
|
||||||
use crate::expr::{Expr, ExprData, ExprHandle, ExprKind};
|
use crate::expr::{Expr, ExprData, ExprHandle, ExprKind};
|
||||||
use crate::gen_expr::GExpr;
|
use crate::gen_expr::GExpr;
|
||||||
use crate::system::{DynSystemCard, atom_info_for, downcast_atom};
|
use crate::system::{DynSystemCard, SysCtx, atom_info_for, downcast_atom};
|
||||||
|
|
||||||
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
||||||
pub struct AtomTypeId(pub NonZeroU32);
|
pub struct AtomTypeId(pub NonZeroU32);
|
||||||
@@ -90,25 +92,26 @@ pub struct ForeignAtom {
|
|||||||
}
|
}
|
||||||
impl ForeignAtom {
|
impl ForeignAtom {
|
||||||
pub fn pos(&self) -> Pos { self.pos.clone() }
|
pub fn pos(&self) -> Pos { self.pos.clone() }
|
||||||
|
pub fn ctx(&self) -> &SysCtx { &self.expr.ctx }
|
||||||
pub fn ex(self) -> Expr {
|
pub fn ex(self) -> Expr {
|
||||||
let (handle, pos) = (self.expr.clone(), self.pos.clone());
|
let (handle, pos) = (self.expr.clone(), self.pos.clone());
|
||||||
let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { ..self }) };
|
let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { ..self }) };
|
||||||
Expr::from_data(handle, data)
|
Expr::new(handle, data)
|
||||||
}
|
}
|
||||||
pub(crate) fn new(handle: Rc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self {
|
pub(crate) fn new(handle: Rc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self {
|
||||||
ForeignAtom { atom, expr: handle, pos }
|
ForeignAtom { atom, expr: handle, pos }
|
||||||
}
|
}
|
||||||
pub async fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
|
pub async fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
|
||||||
let rep = (ctx().reqnot().request(api::Fwd(
|
let rep = (self.ctx().reqnot().request(api::Fwd(
|
||||||
self.atom.clone(),
|
self.atom.clone(),
|
||||||
Sym::parse(M::NAME, &i()).await.unwrap().tok().to_api(),
|
Sym::parse(M::NAME, self.ctx().i()).await.unwrap().tok().to_api(),
|
||||||
enc_vec(&m).await,
|
enc_vec(&m).await,
|
||||||
)))
|
)))
|
||||||
.await?;
|
.await?;
|
||||||
Some(M::Response::decode(Pin::new(&mut &rep[..])).await)
|
Some(M::Response::decode(Pin::new(&mut &rep[..])).await)
|
||||||
}
|
}
|
||||||
pub async fn downcast<T: AtomicFeatures>(self) -> Result<TAtom<T>, NotTypAtom> {
|
pub async fn downcast<T: AtomicFeatures>(self) -> Result<TypAtom<T>, NotTypAtom> {
|
||||||
TAtom::downcast(self.ex().handle()).await
|
TypAtom::downcast(self.ex().handle()).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl fmt::Display for ForeignAtom {
|
impl fmt::Display for ForeignAtom {
|
||||||
@@ -119,38 +122,38 @@ impl fmt::Debug for ForeignAtom {
|
|||||||
}
|
}
|
||||||
impl Format for ForeignAtom {
|
impl Format for ForeignAtom {
|
||||||
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
FmtUnit::from_api(&ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await)
|
FmtUnit::from_api(&self.ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl ToExpr for ForeignAtom {
|
impl ToExpr for ForeignAtom {
|
||||||
async fn to_gen(self) -> GExpr { self.ex().to_gen().await }
|
async fn to_expr(self) -> GExpr { self.ex().to_expr().await }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct NotTypAtom {
|
pub struct NotTypAtom {
|
||||||
pub pos: Pos,
|
pub pos: Pos,
|
||||||
pub expr: Expr,
|
pub expr: Expr,
|
||||||
pub typ: Box<dyn AtomDynfo>,
|
pub typ: Box<dyn AtomDynfo>,
|
||||||
|
pub ctx: SysCtx,
|
||||||
}
|
}
|
||||||
impl NotTypAtom {
|
impl NotTypAtom {
|
||||||
pub async fn mk_err(&self) -> OrcErrv {
|
pub fn mk_err(&self, ctx: &Ctx) -> OrcErr {
|
||||||
mk_errv(
|
ctx.mk_err("Not the expected type", format!("This expression is not a {}", self.typ.name()), [
|
||||||
i().i("Not the expected type").await,
|
self.pos.clone(),
|
||||||
format!("The expression {} is not a {}", fmt(&self.expr, &i()).await, self.typ.name()),
|
])
|
||||||
[self.pos.clone()],
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait AtomMethod: Request + Coding {
|
pub trait AtomMethod: Request {
|
||||||
const NAME: &str;
|
const NAME: &str;
|
||||||
}
|
}
|
||||||
pub trait Supports<M: AtomMethod>: AtomCard {
|
pub trait Supports<M: AtomMethod>: AtomCard {
|
||||||
fn handle(&self, req: M) -> impl Future<Output = <M as Request>::Response>;
|
fn handle(&self, ctx: SysCtx, req: M) -> impl Future<Output = <M as Request>::Response>;
|
||||||
}
|
}
|
||||||
|
|
||||||
trait_set! {
|
trait_set! {
|
||||||
trait AtomReqCb<A> = for<'a> Fn(
|
trait AtomReqCb<A> = for<'a> Fn(
|
||||||
&'a A,
|
&'a A,
|
||||||
|
SysCtx,
|
||||||
Pin<&'a mut dyn AsyncRead>,
|
Pin<&'a mut dyn AsyncRead>,
|
||||||
Pin<&'a mut dyn AsyncWrite>,
|
Pin<&'a mut dyn AsyncWrite>,
|
||||||
) -> LocalBoxFuture<'a, ()>
|
) -> LocalBoxFuture<'a, ()>
|
||||||
@@ -167,18 +170,24 @@ impl<A: AtomCard> MethodSetBuilder<A> {
|
|||||||
assert!(!M::NAME.is_empty(), "AtomMethod::NAME cannoot be empty");
|
assert!(!M::NAME.is_empty(), "AtomMethod::NAME cannoot be empty");
|
||||||
self.handlers.push((
|
self.handlers.push((
|
||||||
M::NAME,
|
M::NAME,
|
||||||
Rc::new(move |a: &A, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| {
|
Rc::new(
|
||||||
async { Supports::<M>::handle(a, M::decode(req).await).await.encode(rep).await }
|
move |a: &A, ctx: SysCtx, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| {
|
||||||
|
async { Supports::<M>::handle(a, ctx, M::decode(req).await).await.encode(rep).await }
|
||||||
.boxed_local()
|
.boxed_local()
|
||||||
}),
|
},
|
||||||
|
),
|
||||||
));
|
));
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn pack(&self) -> MethodSet<A> {
|
pub async fn pack(&self, ctx: SysCtx) -> MethodSet<A> {
|
||||||
MethodSet {
|
MethodSet {
|
||||||
handlers: stream::iter(self.handlers.iter())
|
handlers: stream::iter(self.handlers.iter())
|
||||||
.then(async |(k, v)| (Sym::parse(k, &i()).await.unwrap(), v.clone()))
|
.then(|(k, v)| {
|
||||||
|
clone!(ctx; async move {
|
||||||
|
(Sym::parse(k, ctx.i()).await.unwrap(), v.clone())
|
||||||
|
})
|
||||||
|
})
|
||||||
.collect()
|
.collect()
|
||||||
.await,
|
.await,
|
||||||
}
|
}
|
||||||
@@ -192,6 +201,7 @@ impl<A: AtomCard> MethodSet<A> {
|
|||||||
pub(crate) async fn dispatch<'a>(
|
pub(crate) async fn dispatch<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
atom: &'a A,
|
atom: &'a A,
|
||||||
|
ctx: SysCtx,
|
||||||
key: Sym,
|
key: Sym,
|
||||||
req: Pin<&'a mut dyn AsyncRead>,
|
req: Pin<&'a mut dyn AsyncRead>,
|
||||||
rep: Pin<&'a mut dyn AsyncWrite>,
|
rep: Pin<&'a mut dyn AsyncWrite>,
|
||||||
@@ -199,7 +209,7 @@ impl<A: AtomCard> MethodSet<A> {
|
|||||||
match self.handlers.get(&key) {
|
match self.handlers.get(&key) {
|
||||||
None => false,
|
None => false,
|
||||||
Some(handler) => {
|
Some(handler) => {
|
||||||
handler(atom, req, rep).await;
|
handler(atom, ctx, req, rep).await;
|
||||||
true
|
true
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -211,29 +221,38 @@ impl<A: AtomCard> Default for MethodSetBuilder<A> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct TAtom<A: AtomicFeatures> {
|
pub struct TypAtom<A: AtomicFeatures> {
|
||||||
pub untyped: ForeignAtom,
|
pub untyped: ForeignAtom,
|
||||||
pub value: A::Data,
|
pub value: A::Data,
|
||||||
}
|
}
|
||||||
impl<A: AtomicFeatures> TAtom<A> {
|
impl<A: AtomicFeatures> TypAtom<A> {
|
||||||
pub fn ex(&self) -> Expr { self.untyped.clone().ex() }
|
pub fn ctx(&self) -> &SysCtx { self.untyped.ctx() }
|
||||||
pub fn pos(&self) -> Pos { self.untyped.pos() }
|
pub fn i(&self) -> &Interner { self.ctx().i() }
|
||||||
pub async fn downcast(expr: Rc<ExprHandle>) -> Result<Self, NotTypAtom> {
|
pub async fn downcast(expr: Rc<ExprHandle>) -> Result<Self, NotTypAtom> {
|
||||||
match Expr::from_handle(expr).atom().await {
|
match Expr::from_handle(expr).atom().await {
|
||||||
Err(expr) =>
|
Err(expr) => Err(NotTypAtom {
|
||||||
Err(NotTypAtom { pos: expr.data().await.pos.clone(), expr, typ: Box::new(A::info()) }),
|
ctx: expr.handle().get_ctx(),
|
||||||
|
pos: expr.data().await.pos.clone(),
|
||||||
|
expr,
|
||||||
|
typ: Box::new(A::info()),
|
||||||
|
}),
|
||||||
Ok(atm) => match downcast_atom::<A>(atm).await {
|
Ok(atm) => match downcast_atom::<A>(atm).await {
|
||||||
Ok(tatom) => Ok(tatom),
|
Ok(tatom) => Ok(tatom),
|
||||||
Err(fa) => Err(NotTypAtom { pos: fa.pos.clone(), expr: fa.ex(), typ: Box::new(A::info()) }),
|
Err(fa) => Err(NotTypAtom {
|
||||||
|
pos: fa.pos.clone(),
|
||||||
|
ctx: fa.ctx().clone(),
|
||||||
|
expr: fa.ex(),
|
||||||
|
typ: Box::new(A::info()),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response
|
pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response
|
||||||
where A: Supports<M> {
|
where A: Supports<M> {
|
||||||
M::Response::decode(Pin::new(
|
M::Response::decode(Pin::new(
|
||||||
&mut &(ctx().reqnot().request(api::Fwd(
|
&mut &(self.untyped.ctx().reqnot().request(api::Fwd(
|
||||||
self.untyped.atom.clone(),
|
self.untyped.atom.clone(),
|
||||||
Sym::parse(M::NAME, &i()).await.unwrap().tok().to_api(),
|
Sym::parse(M::NAME, self.untyped.ctx().i()).await.unwrap().tok().to_api(),
|
||||||
enc_vec(&req).await,
|
enc_vec(&req).await,
|
||||||
)))
|
)))
|
||||||
.await
|
.await
|
||||||
@@ -242,20 +261,18 @@ impl<A: AtomicFeatures> TAtom<A> {
|
|||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<A: AtomicFeatures> Deref for TAtom<A> {
|
impl<A: AtomicFeatures> Deref for TypAtom<A> {
|
||||||
type Target = A::Data;
|
type Target = A::Data;
|
||||||
fn deref(&self) -> &Self::Target { &self.value }
|
fn deref(&self) -> &Self::Target { &self.value }
|
||||||
}
|
}
|
||||||
impl<A: AtomicFeatures> ToExpr for TAtom<A> {
|
impl<A: AtomicFeatures> ToExpr for TypAtom<A> {
|
||||||
async fn to_gen(self) -> GExpr { self.untyped.to_gen().await }
|
async fn to_expr(self) -> GExpr { self.untyped.to_expr().await }
|
||||||
}
|
|
||||||
impl<A: AtomicFeatures> Format for TAtom<A> {
|
|
||||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
|
||||||
self.untyped.print(c).await
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>);
|
pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>, pub SysCtx);
|
||||||
|
impl FmtCtx for AtomCtx<'_> {
|
||||||
|
fn i(&self) -> &Interner { self.2.i() }
|
||||||
|
}
|
||||||
|
|
||||||
pub trait AtomDynfo: 'static {
|
pub trait AtomDynfo: 'static {
|
||||||
fn tid(&self) -> TypeId;
|
fn tid(&self) -> TypeId;
|
||||||
@@ -277,19 +294,24 @@ pub trait AtomDynfo: 'static {
|
|||||||
ctx: AtomCtx<'a>,
|
ctx: AtomCtx<'a>,
|
||||||
write: Pin<&'b mut dyn AsyncWrite>,
|
write: Pin<&'b mut dyn AsyncWrite>,
|
||||||
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
|
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
|
||||||
fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom>;
|
fn deserialize<'a>(
|
||||||
|
&'a self,
|
||||||
|
ctx: SysCtx,
|
||||||
|
data: &'a [u8],
|
||||||
|
refs: &'a [Expr],
|
||||||
|
) -> LocalBoxFuture<'a, api::Atom>;
|
||||||
fn drop<'a>(&'a self, ctx: AtomCtx<'a>) -> LocalBoxFuture<'a, ()>;
|
fn drop<'a>(&'a self, ctx: AtomCtx<'a>) -> LocalBoxFuture<'a, ()>;
|
||||||
}
|
}
|
||||||
|
|
||||||
trait_set! {
|
trait_set! {
|
||||||
pub trait AtomFactoryFn = FnOnce() -> LocalBoxFuture<'static, api::Atom> + DynClone;
|
pub trait AtomFactoryFn = FnOnce(SysCtx) -> LocalBoxFuture<'static, api::Atom> + DynClone;
|
||||||
}
|
}
|
||||||
pub struct AtomFactory(Box<dyn AtomFactoryFn>);
|
pub struct AtomFactory(Box<dyn AtomFactoryFn>);
|
||||||
impl AtomFactory {
|
impl AtomFactory {
|
||||||
pub fn new(f: impl AsyncFnOnce() -> api::Atom + Clone + 'static) -> Self {
|
pub fn new(f: impl AsyncFnOnce(SysCtx) -> api::Atom + Clone + 'static) -> Self {
|
||||||
Self(Box::new(|| f().boxed_local()))
|
Self(Box::new(|ctx| f(ctx).boxed_local()))
|
||||||
}
|
}
|
||||||
pub async fn build(self) -> api::Atom { (self.0)().await }
|
pub async fn build(self, ctx: SysCtx) -> api::Atom { (self.0)(ctx).await }
|
||||||
}
|
}
|
||||||
impl Clone for AtomFactory {
|
impl Clone for AtomFactory {
|
||||||
fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) }
|
fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) }
|
||||||
@@ -306,10 +328,10 @@ impl Format for AtomFactory {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn err_not_callable() -> OrcErrv {
|
pub fn err_not_callable(cx: &Ctx) -> OrcErr {
|
||||||
mk_errv_floating(i().i("This atom is not callable").await, "Attempted to apply value as function")
|
cx.mk_err_floating("This atom is not callable", "Attempted to apply value as function")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn err_not_command() -> OrcErrv {
|
pub fn err_not_command(cx: &Ctx) -> OrcErr {
|
||||||
mk_errv_floating(i().i("This atom is not a command").await, "Settled on an inactionable value")
|
cx.mk_err_floating("This atom is not a command", "Settled on an inactionable value")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,50 +1,46 @@
|
|||||||
use std::any::{Any, TypeId, type_name};
|
use std::any::{Any, TypeId, type_name};
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use std::marker::PhantomData;
|
|
||||||
use std::num::NonZero;
|
use std::num::NonZero;
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use std::pin::Pin;
|
use std::pin::Pin;
|
||||||
use std::sync::atomic::AtomicU64;
|
use std::sync::atomic::AtomicU64;
|
||||||
|
|
||||||
|
use async_lock::{RwLock, RwLockReadGuard};
|
||||||
use async_once_cell::OnceCell;
|
use async_once_cell::OnceCell;
|
||||||
use dyn_clone::{DynClone, clone_box};
|
|
||||||
use futures::future::{LocalBoxFuture, ready};
|
use futures::future::{LocalBoxFuture, ready};
|
||||||
use futures::{AsyncRead, AsyncWrite, FutureExt};
|
use futures::{AsyncRead, AsyncWrite, FutureExt};
|
||||||
use futures_locks::{RwLock, RwLockReadGuard};
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use memo_map::MemoMap;
|
use memo_map::MemoMap;
|
||||||
use never::Never;
|
use never::Never;
|
||||||
use orchid_api_traits::{Decode, Encode, enc_vec};
|
use orchid_api_traits::{Decode, Encode, enc_vec};
|
||||||
use orchid_base::error::OrcRes;
|
use orchid_base::error::OrcRes;
|
||||||
use orchid_base::format::{FmtCtx, FmtCtxImpl, FmtUnit, take_first};
|
use orchid_base::format::{FmtCtx, FmtCtxImpl, FmtUnit};
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::atom::{
|
use crate::atom::{
|
||||||
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
|
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
|
||||||
MethodSetBuilder, TAtom, err_not_callable, err_not_command, get_info,
|
MethodSetBuilder, TypAtom, err_not_callable, err_not_command, get_info,
|
||||||
};
|
};
|
||||||
use crate::context::{SysCtxEntry, ctx, i};
|
|
||||||
use crate::expr::Expr;
|
use crate::expr::Expr;
|
||||||
use crate::gen_expr::{GExpr, bot};
|
use crate::gen_expr::{GExpr, bot};
|
||||||
|
use crate::system::{SysCtx, SysCtxEntry};
|
||||||
use crate::system_ctor::CtedObj;
|
use crate::system_ctor::CtedObj;
|
||||||
|
|
||||||
pub struct OwnedVariant;
|
pub struct OwnedVariant;
|
||||||
impl AtomicVariant for OwnedVariant {}
|
impl AtomicVariant for OwnedVariant {}
|
||||||
impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A {
|
impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A {
|
||||||
fn _factory(self) -> AtomFactory {
|
fn _factory(self) -> AtomFactory {
|
||||||
AtomFactory::new(async move || {
|
AtomFactory::new(async move |ctx| {
|
||||||
let serial = ctx()
|
let serial =
|
||||||
.get_or_default::<ObjStore>()
|
ctx.get_or_default::<ObjStore>().next_id.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
|
||||||
.next_id
|
|
||||||
.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
|
|
||||||
let atom_id = api::AtomId(NonZero::new(serial + 1).unwrap());
|
let atom_id = api::AtomId(NonZero::new(serial + 1).unwrap());
|
||||||
let (typ_id, _) = get_info::<A>(ctx().get::<CtedObj>().inst().card());
|
let (typ_id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card());
|
||||||
let mut data = enc_vec(&typ_id).await;
|
let mut data = enc_vec(&typ_id).await;
|
||||||
self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await;
|
self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await;
|
||||||
ctx().get_or_default::<ObjStore>().objects.read().await.insert(atom_id, Box::new(self));
|
ctx.get_or_default::<ObjStore>().objects.read().await.insert(atom_id, Box::new(self));
|
||||||
api::Atom { drop: Some(atom_id), data: api::AtomData(data), owner: ctx().sys_id() }
|
api::Atom { drop: Some(atom_id), data, owner: ctx.sys_id() }
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } }
|
fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } }
|
||||||
@@ -54,16 +50,14 @@ impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVari
|
|||||||
/// While an atom read guard is held, no atom can be removed.
|
/// While an atom read guard is held, no atom can be removed.
|
||||||
pub(crate) struct AtomReadGuard<'a> {
|
pub(crate) struct AtomReadGuard<'a> {
|
||||||
id: api::AtomId,
|
id: api::AtomId,
|
||||||
_lock: PhantomData<&'a ()>,
|
guard: RwLockReadGuard<'a, MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
|
||||||
guard: RwLockReadGuard<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
|
|
||||||
}
|
}
|
||||||
impl<'a> AtomReadGuard<'a> {
|
impl<'a> AtomReadGuard<'a> {
|
||||||
async fn new(id: api::AtomId) -> Self {
|
async fn new(id: api::AtomId, ctx: &'a SysCtx) -> Self {
|
||||||
let guard = ctx().get_or_default::<ObjStore>().objects.read().await;
|
let guard = ctx.get_or_default::<ObjStore>().objects.read().await;
|
||||||
if guard.get(&id).is_none() {
|
let valid = guard.iter().map(|i| i.0).collect_vec();
|
||||||
panic!("Received invalid atom ID: {id:?}");
|
assert!(guard.get(&id).is_some(), "Received invalid atom ID: {id:?} not in {valid:?}");
|
||||||
}
|
Self { id, guard }
|
||||||
Self { id, guard, _lock: PhantomData }
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Deref for AtomReadGuard<'_> {
|
impl Deref for AtomReadGuard<'_> {
|
||||||
@@ -72,8 +66,8 @@ impl Deref for AtomReadGuard<'_> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Remove an atom from the store
|
/// Remove an atom from the store
|
||||||
pub(crate) async fn take_atom(id: api::AtomId) -> Box<dyn DynOwnedAtom> {
|
pub(crate) async fn take_atom(id: api::AtomId, ctx: &SysCtx) -> Box<dyn DynOwnedAtom> {
|
||||||
let mut g = ctx().get_or_default::<ObjStore>().objects.write().await;
|
let mut g = ctx.get_or_default::<ObjStore>().objects.write().await;
|
||||||
g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0))
|
g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -89,53 +83,64 @@ impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
|
|||||||
Box::new(<T as AtomCard>::Data::decode(Pin::new(&mut &data[..])).await) as Box<dyn Any>
|
Box::new(<T as AtomCard>::Data::decode(Pin::new(&mut &data[..])).await) as Box<dyn Any>
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn call(&self, AtomCtx(_, id): AtomCtx, arg: Expr) -> LocalBoxFuture<'_, GExpr> {
|
fn call(&self, AtomCtx(_, id, ctx): AtomCtx, arg: Expr) -> LocalBoxFuture<'_, GExpr> {
|
||||||
Box::pin(async move { take_atom(id.unwrap()).await.dyn_call(arg).await })
|
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_call(arg).await })
|
||||||
}
|
}
|
||||||
fn call_ref<'a>(&'a self, AtomCtx(_, id): AtomCtx<'a>, arg: Expr) -> LocalBoxFuture<'a, GExpr> {
|
fn call_ref<'a>(
|
||||||
Box::pin(async move { AtomReadGuard::new(id.unwrap()).await.dyn_call_ref(arg).await })
|
&'a self,
|
||||||
|
AtomCtx(_, id, ctx): AtomCtx<'a>,
|
||||||
|
arg: Expr,
|
||||||
|
) -> LocalBoxFuture<'a, GExpr> {
|
||||||
|
Box::pin(async move { AtomReadGuard::new(id.unwrap(), &ctx).await.dyn_call_ref(arg).await })
|
||||||
}
|
}
|
||||||
fn print(&self, AtomCtx(_, id): AtomCtx<'_>) -> LocalBoxFuture<'_, FmtUnit> {
|
fn print(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> LocalBoxFuture<'_, FmtUnit> {
|
||||||
Box::pin(async move { AtomReadGuard::new(id.unwrap()).await.dyn_print().await })
|
Box::pin(
|
||||||
|
async move { AtomReadGuard::new(id.unwrap(), &ctx).await.dyn_print(ctx.clone()).await },
|
||||||
|
)
|
||||||
}
|
}
|
||||||
fn handle_req<'a, 'b: 'a, 'c: 'a>(
|
fn handle_req<'a, 'b: 'a, 'c: 'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
AtomCtx(_, id): AtomCtx,
|
AtomCtx(_, id, ctx): AtomCtx,
|
||||||
key: Sym,
|
key: Sym,
|
||||||
req: Pin<&'b mut dyn AsyncRead>,
|
req: Pin<&'b mut dyn AsyncRead>,
|
||||||
rep: Pin<&'c mut dyn AsyncWrite>,
|
rep: Pin<&'c mut dyn AsyncWrite>,
|
||||||
) -> LocalBoxFuture<'a, bool> {
|
) -> LocalBoxFuture<'a, bool> {
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let a = AtomReadGuard::new(id.unwrap()).await;
|
let a = AtomReadGuard::new(id.unwrap(), &ctx).await;
|
||||||
let ms = self.ms.get_or_init(self.msbuild.pack()).await;
|
let ms = self.ms.get_or_init(self.msbuild.pack(ctx.clone())).await;
|
||||||
ms.dispatch(a.as_any_ref().downcast_ref().unwrap(), key, req, rep).await
|
ms.dispatch(a.as_any_ref().downcast_ref().unwrap(), ctx.clone(), key, req, rep).await
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn command<'a>(
|
fn command<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
AtomCtx(_, id): AtomCtx<'a>,
|
AtomCtx(_, id, ctx): AtomCtx<'a>,
|
||||||
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
|
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
|
||||||
Box::pin(async move { take_atom(id.unwrap()).await.dyn_command().await })
|
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_command(ctx.clone()).await })
|
||||||
}
|
}
|
||||||
fn drop(&self, AtomCtx(_, id): AtomCtx) -> LocalBoxFuture<'_, ()> {
|
fn drop(&self, AtomCtx(_, id, ctx): AtomCtx) -> LocalBoxFuture<'_, ()> {
|
||||||
Box::pin(async move { take_atom(id.unwrap()).await.dyn_free().await })
|
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_free(ctx.clone()).await })
|
||||||
}
|
}
|
||||||
fn serialize<'a, 'b: 'a>(
|
fn serialize<'a, 'b: 'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
AtomCtx(_, id): AtomCtx<'a>,
|
AtomCtx(_, id, ctx): AtomCtx<'a>,
|
||||||
mut write: Pin<&'b mut dyn AsyncWrite>,
|
mut write: Pin<&'b mut dyn AsyncWrite>,
|
||||||
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
|
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let id = id.unwrap();
|
let id = id.unwrap();
|
||||||
id.encode(write.as_mut()).await;
|
id.encode(write.as_mut()).await;
|
||||||
AtomReadGuard::new(id).await.dyn_serialize(write).await
|
AtomReadGuard::new(id, &ctx).await.dyn_serialize(ctx.clone(), write).await
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom> {
|
fn deserialize<'a>(
|
||||||
|
&'a self,
|
||||||
|
ctx: SysCtx,
|
||||||
|
data: &'a [u8],
|
||||||
|
refs: &'a [Expr],
|
||||||
|
) -> LocalBoxFuture<'a, api::Atom> {
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let refs = T::Refs::from_iter(refs.iter().cloned());
|
let refs = T::Refs::from_iter(refs.iter().cloned());
|
||||||
let obj = T::deserialize(DeserCtxImpl(data), refs).await;
|
let obj = T::deserialize(DeserCtxImpl(data, &ctx), refs).await;
|
||||||
obj._factory().build().await
|
obj._factory().build(ctx).await
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -151,12 +156,14 @@ pub trait DeserializeCtx: Sized {
|
|||||||
t
|
t
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
fn sys(&self) -> SysCtx;
|
||||||
}
|
}
|
||||||
|
|
||||||
struct DeserCtxImpl<'a>(&'a [u8]);
|
struct DeserCtxImpl<'a>(&'a [u8], &'a SysCtx);
|
||||||
impl DeserializeCtx for DeserCtxImpl<'_> {
|
impl DeserializeCtx for DeserCtxImpl<'_> {
|
||||||
async fn read<T: Decode>(&mut self) -> T { T::decode(Pin::new(&mut self.0)).await }
|
async fn read<T: Decode>(&mut self) -> T { T::decode(Pin::new(&mut self.0)).await }
|
||||||
fn is_empty(&self) -> bool { self.0.is_empty() }
|
fn is_empty(&self) -> bool { self.0.is_empty() }
|
||||||
|
fn sys(&self) -> SysCtx { self.1.clone() }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait RefSet {
|
pub trait RefSet {
|
||||||
@@ -207,21 +214,22 @@ pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Any + Clone + 'static {
|
|||||||
fn val(&self) -> impl Future<Output = Cow<'_, Self::Data>>;
|
fn val(&self) -> impl Future<Output = Cow<'_, Self::Data>>;
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn call_ref(&self, arg: Expr) -> impl Future<Output = GExpr> {
|
fn call_ref(&self, arg: Expr) -> impl Future<Output = GExpr> {
|
||||||
async move { bot(err_not_callable().await) }
|
async move { bot(err_not_callable(arg.ctx().i()).await) }
|
||||||
}
|
}
|
||||||
fn call(self, arg: Expr) -> impl Future<Output = GExpr> {
|
fn call(self, arg: Expr) -> impl Future<Output = GExpr> {
|
||||||
async {
|
async {
|
||||||
|
let ctx = arg.ctx();
|
||||||
let gcl = self.call_ref(arg).await;
|
let gcl = self.call_ref(arg).await;
|
||||||
self.free().await;
|
self.free(ctx).await;
|
||||||
gcl
|
gcl
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn command(self) -> impl Future<Output = OrcRes<Option<GExpr>>> {
|
fn command(self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> {
|
||||||
async move { Err(err_not_command().await) }
|
async move { Err(err_not_command(ctx.i()).await) }
|
||||||
}
|
}
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn free(self) -> impl Future<Output = ()> { async {} }
|
fn free(self, ctx: SysCtx) -> impl Future<Output = ()> { async {} }
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> {
|
fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> {
|
||||||
async { format!("OwnedAtom({})", type_name::<Self>()).into() }
|
async { format!("OwnedAtom({})", type_name::<Self>()).into() }
|
||||||
@@ -229,13 +237,14 @@ pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Any + Clone + 'static {
|
|||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn serialize(
|
fn serialize(
|
||||||
&self,
|
&self,
|
||||||
|
ctx: SysCtx,
|
||||||
write: Pin<&mut (impl AsyncWrite + ?Sized)>,
|
write: Pin<&mut (impl AsyncWrite + ?Sized)>,
|
||||||
) -> impl Future<Output = Self::Refs> {
|
) -> impl Future<Output = Self::Refs> {
|
||||||
assert_serializable::<Self>();
|
assert_serializable::<Self>();
|
||||||
async { panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>()) }
|
async { panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>()) }
|
||||||
}
|
}
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn deserialize(dctx: impl DeserializeCtx, refs: Self::Refs) -> impl Future<Output = Self> {
|
fn deserialize(ctx: impl DeserializeCtx, refs: Self::Refs) -> impl Future<Output = Self> {
|
||||||
assert_serializable::<Self>();
|
assert_serializable::<Self>();
|
||||||
async {
|
async {
|
||||||
panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>())
|
panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>())
|
||||||
@@ -248,17 +257,18 @@ fn assert_serializable<T: OwnedAtom>() {
|
|||||||
assert_ne!(TypeId::of::<T::Refs>(), TypeId::of::<Never>(), "{MSG}");
|
assert_ne!(TypeId::of::<T::Refs>(), TypeId::of::<Never>(), "{MSG}");
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait DynOwnedAtom: DynClone + 'static {
|
pub trait DynOwnedAtom: 'static {
|
||||||
fn atom_tid(&self) -> TypeId;
|
fn atom_tid(&self) -> TypeId;
|
||||||
fn as_any_ref(&self) -> &dyn Any;
|
fn as_any_ref(&self) -> &dyn Any;
|
||||||
fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()>;
|
fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()>;
|
||||||
fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr>;
|
fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr>;
|
||||||
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr>;
|
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr>;
|
||||||
fn dyn_command(self: Box<Self>) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>>;
|
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>>;
|
||||||
fn dyn_free(self: Box<Self>) -> LocalBoxFuture<'static, ()>;
|
fn dyn_free(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, ()>;
|
||||||
fn dyn_print(&self) -> LocalBoxFuture<'_, FmtUnit>;
|
fn dyn_print(&self, ctx: SysCtx) -> LocalBoxFuture<'_, FmtUnit>;
|
||||||
fn dyn_serialize<'a>(
|
fn dyn_serialize<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
|
ctx: SysCtx,
|
||||||
sink: Pin<&'a mut dyn AsyncWrite>,
|
sink: Pin<&'a mut dyn AsyncWrite>,
|
||||||
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
|
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
|
||||||
}
|
}
|
||||||
@@ -274,57 +284,38 @@ impl<T: OwnedAtom> DynOwnedAtom for T {
|
|||||||
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr> {
|
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr> {
|
||||||
self.call(arg).boxed_local()
|
self.call(arg).boxed_local()
|
||||||
}
|
}
|
||||||
fn dyn_command(self: Box<Self>) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>> {
|
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>> {
|
||||||
self.command().boxed_local()
|
self.command(ctx).boxed_local()
|
||||||
}
|
}
|
||||||
fn dyn_free(self: Box<Self>) -> LocalBoxFuture<'static, ()> { self.free().boxed_local() }
|
fn dyn_free(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, ()> {
|
||||||
fn dyn_print(&self) -> LocalBoxFuture<'_, FmtUnit> {
|
self.free(ctx).boxed_local()
|
||||||
async move { self.print_atom(&FmtCtxImpl { i: &i() }).await }.boxed_local()
|
}
|
||||||
|
fn dyn_print(&self, ctx: SysCtx) -> LocalBoxFuture<'_, FmtUnit> {
|
||||||
|
async move { self.print_atom(&FmtCtxImpl { i: ctx.i() }).await }.boxed_local()
|
||||||
}
|
}
|
||||||
fn dyn_serialize<'a>(
|
fn dyn_serialize<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
|
ctx: SysCtx,
|
||||||
sink: Pin<&'a mut dyn AsyncWrite>,
|
sink: Pin<&'a mut dyn AsyncWrite>,
|
||||||
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
|
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
|
||||||
match TypeId::of::<Never>() == TypeId::of::<<Self as OwnedAtom>::Refs>() {
|
match TypeId::of::<Never>() == TypeId::of::<<Self as OwnedAtom>::Refs>() {
|
||||||
true => ready(None).boxed_local(),
|
true => ready(None).boxed_local(),
|
||||||
false => async { Some(self.serialize(sink).await.to_vec()) }.boxed_local(),
|
false => async { Some(self.serialize(ctx, sink).await.to_vec()) }.boxed_local(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub(crate) struct ObjStore {
|
struct ObjStore {
|
||||||
pub(crate) next_id: AtomicU64,
|
next_id: AtomicU64,
|
||||||
pub(crate) objects: RwLock<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
|
objects: RwLock<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
|
||||||
}
|
}
|
||||||
impl SysCtxEntry for ObjStore {}
|
impl SysCtxEntry for ObjStore {}
|
||||||
|
|
||||||
pub async fn own<A: OwnedAtom>(typ: &TAtom<A>) -> A {
|
pub async fn own<A: OwnedAtom>(typ: TypAtom<A>) -> A {
|
||||||
let g = ctx().get_or_default::<ObjStore>().objects.read().await;
|
let ctx = typ.untyped.ctx();
|
||||||
let atom_id = typ.untyped.atom.drop.expect("Owned atoms always have a drop ID");
|
let g = ctx.get_or_default::<ObjStore>().objects.read().await;
|
||||||
let dyn_atom =
|
let dyn_atom = (g.get(&typ.untyped.atom.drop.expect("Owned atoms always have a drop ID")))
|
||||||
g.get(&atom_id).expect("Atom ID invalid; atom type probably not owned by this crate");
|
.expect("Atom ID invalid; atom type probably not owned by this crate");
|
||||||
dyn_atom.as_any_ref().downcast_ref().cloned().expect("The ID should imply a type as well")
|
dyn_atom.as_any_ref().downcast_ref().cloned().expect("The ID should imply a type as well")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn debug_print_obj_store(show_atoms: bool) {
|
|
||||||
let ctx = ctx();
|
|
||||||
let store = ctx.get_or_default::<ObjStore>();
|
|
||||||
let keys = store.objects.read().await.keys().cloned().collect_vec();
|
|
||||||
let mut message = "Atoms in store:".to_string();
|
|
||||||
if !show_atoms {
|
|
||||||
message += &keys.iter().map(|k| format!(" {:?}", k)).join("");
|
|
||||||
} else {
|
|
||||||
for k in keys {
|
|
||||||
let g = store.objects.read().await;
|
|
||||||
let Some(atom) = g.get(&k) else {
|
|
||||||
message += &format!("\n{k:?} has since been deleted");
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
let atom = clone_box(&**atom);
|
|
||||||
std::mem::drop(g);
|
|
||||||
message += &format!("\n{k:?} -> {}", take_first(&atom.dyn_print().await, true));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
eprintln!("{message}")
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -15,20 +15,20 @@ use crate::atom::{
|
|||||||
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
|
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
|
||||||
MethodSetBuilder, err_not_callable, err_not_command, get_info,
|
MethodSetBuilder, err_not_callable, err_not_command, get_info,
|
||||||
};
|
};
|
||||||
use crate::context::ctx;
|
|
||||||
use crate::expr::Expr;
|
use crate::expr::Expr;
|
||||||
use crate::gen_expr::{GExpr, bot};
|
use crate::gen_expr::{GExpr, bot};
|
||||||
|
use crate::system::SysCtx;
|
||||||
use crate::system_ctor::CtedObj;
|
use crate::system_ctor::CtedObj;
|
||||||
|
|
||||||
pub struct ThinVariant;
|
pub struct ThinVariant;
|
||||||
impl AtomicVariant for ThinVariant {}
|
impl AtomicVariant for ThinVariant {}
|
||||||
impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A {
|
impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A {
|
||||||
fn _factory(self) -> AtomFactory {
|
fn _factory(self) -> AtomFactory {
|
||||||
AtomFactory::new(async move || {
|
AtomFactory::new(async move |ctx| {
|
||||||
let (id, _) = get_info::<A>(ctx().get::<CtedObj>().inst().card());
|
let (id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card());
|
||||||
let mut buf = enc_vec(&id).await;
|
let mut buf = enc_vec(&id).await;
|
||||||
self.encode(Pin::new(&mut buf)).await;
|
self.encode(Pin::new(&mut buf)).await;
|
||||||
api::Atom { drop: None, data: api::AtomData(buf), owner: ctx().sys_id() }
|
api::Atom { drop: None, data: buf, owner: ctx.sys_id() }
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } }
|
fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } }
|
||||||
@@ -40,8 +40,8 @@ pub struct ThinAtomDynfo<T: ThinAtom> {
|
|||||||
ms: OnceCell<MethodSet<T>>,
|
ms: OnceCell<MethodSet<T>>,
|
||||||
}
|
}
|
||||||
impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
|
impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
|
||||||
fn print<'a>(&self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, FmtUnit> {
|
fn print<'a>(&self, AtomCtx(buf, _, ctx): AtomCtx<'a>) -> LocalBoxFuture<'a, FmtUnit> {
|
||||||
Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.print().await })
|
Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.print(ctx).await })
|
||||||
}
|
}
|
||||||
fn tid(&self) -> TypeId { TypeId::of::<T>() }
|
fn tid(&self) -> TypeId { TypeId::of::<T>() }
|
||||||
fn name(&self) -> &'static str { type_name::<T>() }
|
fn name(&self) -> &'static str { type_name::<T>() }
|
||||||
@@ -56,21 +56,21 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
|
|||||||
}
|
}
|
||||||
fn handle_req<'a, 'm1: 'a, 'm2: 'a>(
|
fn handle_req<'a, 'm1: 'a, 'm2: 'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
AtomCtx(buf, _): AtomCtx<'a>,
|
AtomCtx(buf, _, sys): AtomCtx<'a>,
|
||||||
key: Sym,
|
key: Sym,
|
||||||
req: Pin<&'m1 mut dyn AsyncRead>,
|
req: Pin<&'m1 mut dyn AsyncRead>,
|
||||||
rep: Pin<&'m2 mut dyn AsyncWrite>,
|
rep: Pin<&'m2 mut dyn AsyncWrite>,
|
||||||
) -> LocalBoxFuture<'a, bool> {
|
) -> LocalBoxFuture<'a, bool> {
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let ms = self.ms.get_or_init(self.msbuild.pack()).await;
|
let ms = self.ms.get_or_init(self.msbuild.pack(sys.clone())).await;
|
||||||
ms.dispatch(&T::decode(Pin::new(&mut &buf[..])).await, key, req, rep).await
|
ms.dispatch(&T::decode(Pin::new(&mut &buf[..])).await, sys, key, req, rep).await
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn command<'a>(
|
fn command<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
AtomCtx(buf, _): AtomCtx<'a>,
|
AtomCtx(buf, _, ctx): AtomCtx<'a>,
|
||||||
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
|
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
|
||||||
async move { T::decode(Pin::new(&mut &buf[..])).await.command().await }.boxed_local()
|
async move { T::decode(Pin::new(&mut &buf[..])).await.command(ctx).await }.boxed_local()
|
||||||
}
|
}
|
||||||
fn serialize<'a, 'b: 'a>(
|
fn serialize<'a, 'b: 'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
@@ -82,14 +82,19 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
|
|||||||
Some(Vec::new())
|
Some(Vec::new())
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom> {
|
fn deserialize<'a>(
|
||||||
|
&'a self,
|
||||||
|
ctx: SysCtx,
|
||||||
|
data: &'a [u8],
|
||||||
|
refs: &'a [Expr],
|
||||||
|
) -> LocalBoxFuture<'a, api::Atom> {
|
||||||
assert!(refs.is_empty(), "Refs found when deserializing thin atom");
|
assert!(refs.is_empty(), "Refs found when deserializing thin atom");
|
||||||
Box::pin(async { T::decode(Pin::new(&mut &data[..])).await._factory().build().await })
|
Box::pin(async { T::decode(Pin::new(&mut &data[..])).await._factory().build(ctx).await })
|
||||||
}
|
}
|
||||||
fn drop<'a>(&'a self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, ()> {
|
fn drop<'a>(&'a self, AtomCtx(buf, _, ctx): AtomCtx<'a>) -> LocalBoxFuture<'a, ()> {
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let string_self = T::decode(Pin::new(&mut &buf[..])).await.print().await;
|
let string_self = T::decode(Pin::new(&mut &buf[..])).await.print(ctx.clone()).await;
|
||||||
writeln!(ctx().logger(), "Received drop signal for non-drop atom {string_self:?}");
|
writeln!(ctx.logger(), "Received drop signal for non-drop atom {string_self:?}");
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -99,14 +104,14 @@ pub trait ThinAtom:
|
|||||||
{
|
{
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn call(&self, arg: Expr) -> impl Future<Output = GExpr> {
|
fn call(&self, arg: Expr) -> impl Future<Output = GExpr> {
|
||||||
async move { bot(err_not_callable().await) }
|
async move { bot(err_not_callable(arg.ctx().i()).await) }
|
||||||
}
|
}
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn command(&self) -> impl Future<Output = OrcRes<Option<GExpr>>> {
|
fn command(&self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> {
|
||||||
async move { Err(err_not_command().await) }
|
async move { Err(err_not_command(ctx.i()).await) }
|
||||||
}
|
}
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn print(&self) -> impl Future<Output = FmtUnit> {
|
fn print(&self, ctx: SysCtx) -> impl Future<Output = FmtUnit> {
|
||||||
async { format!("ThinAtom({})", type_name::<Self>()).into() }
|
async { format!("ThinAtom({})", type_name::<Self>()).into() }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,90 +0,0 @@
|
|||||||
use std::any::{Any, TypeId, type_name};
|
|
||||||
use std::fmt;
|
|
||||||
use std::num::NonZero;
|
|
||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use memo_map::MemoMap;
|
|
||||||
use orchid_base::builtin::Spawner;
|
|
||||||
use orchid_base::interner::Interner;
|
|
||||||
use orchid_base::logging::Logger;
|
|
||||||
use orchid_base::reqnot::ReqNot;
|
|
||||||
use task_local::task_local;
|
|
||||||
|
|
||||||
use crate::api;
|
|
||||||
use crate::system_ctor::CtedObj;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
|
|
||||||
impl SysCtx {
|
|
||||||
pub fn new(
|
|
||||||
id: api::SysId,
|
|
||||||
i: Interner,
|
|
||||||
reqnot: ReqNot<api::ExtMsgSet>,
|
|
||||||
spawner: Spawner,
|
|
||||||
logger: Logger,
|
|
||||||
cted: CtedObj,
|
|
||||||
) -> Self {
|
|
||||||
let this = Self(Rc::new(MemoMap::new()));
|
|
||||||
this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted);
|
|
||||||
this
|
|
||||||
}
|
|
||||||
pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self {
|
|
||||||
assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists");
|
|
||||||
self
|
|
||||||
}
|
|
||||||
pub fn get_or_insert<T: SysCtxEntry>(&self, f: impl FnOnce() -> T) -> &T {
|
|
||||||
(self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref())
|
|
||||||
.expect("Keyed by TypeId")
|
|
||||||
}
|
|
||||||
pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { self.get_or_insert(T::default) }
|
|
||||||
pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> {
|
|
||||||
Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId"))
|
|
||||||
}
|
|
||||||
pub fn get<T: SysCtxEntry>(&self) -> &T {
|
|
||||||
self.try_get().unwrap_or_else(|| panic!("Context {} missing", type_name::<T>()))
|
|
||||||
}
|
|
||||||
/// Shorthand to get the messaging link
|
|
||||||
pub fn reqnot(&self) -> &ReqNot<api::ExtMsgSet> { self.get::<ReqNot<api::ExtMsgSet>>() }
|
|
||||||
/// Shorthand to get the system ID
|
|
||||||
pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() }
|
|
||||||
/// Spawn a task that will eventually be executed asynchronously
|
|
||||||
pub fn spawn(&self, f: impl Future<Output = ()> + 'static) {
|
|
||||||
(self.get::<Spawner>())(Box::pin(CTX.scope(self.clone(), f)))
|
|
||||||
}
|
|
||||||
/// Shorthand to get the logger
|
|
||||||
pub fn logger(&self) -> &Logger { self.get::<Logger>() }
|
|
||||||
/// Shorthand to get the constructed system object
|
|
||||||
pub fn cted(&self) -> &CtedObj { self.get::<CtedObj>() }
|
|
||||||
}
|
|
||||||
impl fmt::Debug for SysCtx {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "SysCtx({:?})", self.sys_id())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub trait SysCtxEntry: 'static + Sized {}
|
|
||||||
impl SysCtxEntry for api::SysId {}
|
|
||||||
impl SysCtxEntry for ReqNot<api::ExtMsgSet> {}
|
|
||||||
impl SysCtxEntry for Spawner {}
|
|
||||||
impl SysCtxEntry for CtedObj {}
|
|
||||||
impl SysCtxEntry for Logger {}
|
|
||||||
impl SysCtxEntry for Interner {}
|
|
||||||
|
|
||||||
task_local! {
|
|
||||||
static CTX: SysCtx;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn with_ctx<F: Future>(ctx: SysCtx, f: F) -> F::Output { CTX.scope(ctx, f).await }
|
|
||||||
pub fn ctx() -> SysCtx { CTX.get() }
|
|
||||||
|
|
||||||
/// Shorthand to get the [Interner] instance
|
|
||||||
pub fn i() -> Interner { ctx().get::<Interner>().clone() }
|
|
||||||
|
|
||||||
pub fn mock_ctx() -> SysCtx {
|
|
||||||
let ctx = SysCtx(Rc::default());
|
|
||||||
ctx
|
|
||||||
.add(Logger::new(api::LogStrategy::StdErr))
|
|
||||||
.add(Interner::new_master())
|
|
||||||
.add::<Spawner>(Rc::new(|_| panic!("Cannot fork in test environment")))
|
|
||||||
.add(api::SysId(NonZero::<u16>::MIN));
|
|
||||||
ctx
|
|
||||||
}
|
|
||||||
@@ -1,16 +1,14 @@
|
|||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use std::pin::Pin;
|
|
||||||
|
|
||||||
use dyn_clone::DynClone;
|
|
||||||
use never::Never;
|
use never::Never;
|
||||||
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
|
use orchid_base::error::{OrcErrv, OrcRes, mk_err};
|
||||||
|
use orchid_base::interner::Interner;
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
use trait_set::trait_set;
|
|
||||||
|
|
||||||
use crate::atom::{AtomicFeatures, ForeignAtom, TAtom, ToAtom};
|
use crate::atom::{AtomicFeatures, ForeignAtom, ToAtom, TypAtom};
|
||||||
use crate::context::i;
|
|
||||||
use crate::expr::Expr;
|
use crate::expr::Expr;
|
||||||
use crate::gen_expr::{GExpr, atom, bot};
|
use crate::gen_expr::{GExpr, atom, bot};
|
||||||
|
use crate::system::{SysCtx, downcast_atom};
|
||||||
|
|
||||||
pub trait TryFromExpr: Sized {
|
pub trait TryFromExpr: Sized {
|
||||||
fn try_from_expr(expr: Expr) -> impl Future<Output = OrcRes<Self>>;
|
fn try_from_expr(expr: Expr) -> impl Future<Output = OrcRes<Self>>;
|
||||||
@@ -26,91 +24,61 @@ impl<T: TryFromExpr, U: TryFromExpr> TryFromExpr for (T, U) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn err_not_atom(pos: Pos) -> OrcErrv {
|
async fn err_not_atom(pos: Pos, i: &Interner) -> OrcErrv {
|
||||||
mk_errv(i().i("Expected an atom").await, "This expression is not an atom", [pos])
|
mk_err(i.i("Expected an atom").await, "This expression is not an atom", [pos])
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn err_type(pos: Pos, i: &Interner) -> OrcErrv {
|
||||||
|
mk_err(i.i("Type error").await, "The atom is a different type than expected", [pos])
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TryFromExpr for ForeignAtom {
|
impl TryFromExpr for ForeignAtom {
|
||||||
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
||||||
match expr.atom().await {
|
match expr.atom().await {
|
||||||
Err(ex) => Err(err_not_atom(ex.data().await.pos.clone()).await),
|
Err(ex) => Err(err_not_atom(ex.data().await.pos.clone(), ex.ctx().i()).await),
|
||||||
Ok(f) => Ok(f),
|
Ok(f) => Ok(f),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A: AtomicFeatures> TryFromExpr for TAtom<A> {
|
impl<A: AtomicFeatures> TryFromExpr for TypAtom<A> {
|
||||||
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
||||||
let f = ForeignAtom::try_from_expr(expr).await?;
|
let f = ForeignAtom::try_from_expr(expr).await?;
|
||||||
match f.clone().downcast::<A>().await {
|
match downcast_atom::<A>(f).await {
|
||||||
Ok(a) => Ok(a),
|
Ok(a) => Ok(a),
|
||||||
Err(e) => Err(e.mk_err().await),
|
Err(f) => Err(err_type(f.pos(), f.ctx().i()).await),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl TryFromExpr for SysCtx {
|
||||||
|
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr.ctx()) }
|
||||||
|
}
|
||||||
|
|
||||||
pub trait ToExpr {
|
pub trait ToExpr {
|
||||||
fn to_gen(self) -> impl Future<Output = GExpr>;
|
fn to_expr(self) -> impl Future<Output = GExpr>;
|
||||||
fn to_expr(self) -> impl Future<Output = Expr>
|
|
||||||
where Self: Sized {
|
|
||||||
async { self.to_gen().await.create().await }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait ToExprDyn {
|
|
||||||
fn to_gen_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = GExpr> + 'a>>
|
|
||||||
where Self: 'a;
|
|
||||||
|
|
||||||
fn to_expr_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = Expr> + 'a>>
|
|
||||||
where Self: 'a;
|
|
||||||
}
|
|
||||||
impl<T: ToExpr> ToExprDyn for T {
|
|
||||||
fn to_gen_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = GExpr> + 'a>>
|
|
||||||
where Self: 'a {
|
|
||||||
Box::pin(self.to_gen())
|
|
||||||
}
|
|
||||||
fn to_expr_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = Expr> + 'a>>
|
|
||||||
where Self: 'a {
|
|
||||||
Box::pin(self.to_expr())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
trait_set! {
|
|
||||||
pub trait ClonableToExprDyn = ToExprDyn + DynClone;
|
|
||||||
}
|
|
||||||
impl ToExpr for Box<dyn ToExprDyn> {
|
|
||||||
async fn to_gen(self) -> GExpr { self.to_gen_dyn().await }
|
|
||||||
async fn to_expr(self) -> Expr { self.to_expr_dyn().await }
|
|
||||||
}
|
|
||||||
impl ToExpr for Box<dyn ClonableToExprDyn> {
|
|
||||||
async fn to_gen(self) -> GExpr { self.to_gen_dyn().await }
|
|
||||||
async fn to_expr(self) -> Expr { self.to_expr_dyn().await }
|
|
||||||
}
|
|
||||||
impl Clone for Box<dyn ClonableToExprDyn> {
|
|
||||||
fn clone(&self) -> Self { dyn_clone::clone_box(&**self) }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToExpr for GExpr {
|
impl ToExpr for GExpr {
|
||||||
async fn to_gen(self) -> GExpr { self }
|
async fn to_expr(self) -> GExpr { self }
|
||||||
async fn to_expr(self) -> Expr { self.create().await }
|
|
||||||
}
|
}
|
||||||
impl ToExpr for Expr {
|
impl ToExpr for Expr {
|
||||||
async fn to_gen(self) -> GExpr { self.slot() }
|
async fn to_expr(self) -> GExpr { self.slot() }
|
||||||
async fn to_expr(self) -> Expr { self }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: ToExpr> ToExpr for OrcRes<T> {
|
impl<T: ToExpr> ToExpr for OrcRes<T> {
|
||||||
async fn to_gen(self) -> GExpr {
|
async fn to_expr(self) -> GExpr {
|
||||||
match self {
|
match self {
|
||||||
Err(e) => bot(e),
|
Err(e) => bot(e),
|
||||||
Ok(t) => t.to_gen().await,
|
Ok(t) => t.to_expr().await,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A: ToAtom> ToExpr for A {
|
impl<A: ToAtom> ToExpr for A {
|
||||||
async fn to_gen(self) -> GExpr { atom(self) }
|
async fn to_expr(self) -> GExpr { atom(self) }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToExpr for Never {
|
impl ToExpr for Never {
|
||||||
async fn to_gen(self) -> GExpr { match self {} }
|
async fn to_expr(self) -> GExpr { match self {} }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ use futures::stream::{self, LocalBoxStream};
|
|||||||
use futures::{FutureExt, SinkExt, StreamExt};
|
use futures::{FutureExt, SinkExt, StreamExt};
|
||||||
use never::Never;
|
use never::Never;
|
||||||
use orchid_base::error::OrcRes;
|
use orchid_base::error::OrcRes;
|
||||||
|
use orchid_base::format::{FmtCtx, FmtUnit};
|
||||||
|
|
||||||
use crate::atom::Atomic;
|
use crate::atom::Atomic;
|
||||||
use crate::atom_owned::{OwnedAtom, OwnedVariant};
|
use crate::atom_owned::{OwnedAtom, OwnedVariant};
|
||||||
@@ -22,6 +23,7 @@ enum Command {
|
|||||||
}
|
}
|
||||||
|
|
||||||
struct BuilderCoroutineData {
|
struct BuilderCoroutineData {
|
||||||
|
name: Option<String>,
|
||||||
receiver: Mutex<LocalBoxStream<'static, Command>>,
|
receiver: Mutex<LocalBoxStream<'static, Command>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -33,15 +35,15 @@ impl BuilderCoroutine {
|
|||||||
match cmd {
|
match cmd {
|
||||||
None => panic!("Before the stream ends, we should have gotten a Halt"),
|
None => panic!("Before the stream ends, we should have gotten a Halt"),
|
||||||
Some(Command::Halt(expr)) => expr,
|
Some(Command::Halt(expr)) => expr,
|
||||||
Some(Command::Execute(expr, reply)) => call(
|
Some(Command::Execute(expr, reply)) => call([
|
||||||
lambda(0, [seq(
|
lambda(0, [seq([
|
||||||
[arg(0)],
|
arg(0),
|
||||||
call(Replier { reply, builder: self }.to_gen().await, [arg(0)]),
|
call([Replier { reply, builder: self }.to_expr().await, arg(0)]),
|
||||||
)]),
|
])]),
|
||||||
[expr],
|
expr,
|
||||||
),
|
]),
|
||||||
Some(Command::Register(expr, reply)) =>
|
Some(Command::Register(expr, reply)) =>
|
||||||
call(Replier { reply, builder: self }.to_gen().await, [expr]),
|
call([Replier { reply, builder: self }.to_expr().await, expr]),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -63,13 +65,23 @@ impl OwnedAtom for Replier {
|
|||||||
std::mem::drop(self.reply);
|
std::mem::drop(self.reply);
|
||||||
self.builder.run().await
|
self.builder.run().await
|
||||||
}
|
}
|
||||||
|
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
|
match &self.builder.0.name {
|
||||||
|
None => "BuilderCoroutine".into(),
|
||||||
|
Some(name) => format!("BuilderCoroutine({name})").into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn exec<R: ToExpr>(f: impl for<'a> AsyncFnOnce(ExecHandle<'a>) -> R + 'static) -> GExpr {
|
pub async fn exec<R: ToExpr>(
|
||||||
|
debug: impl AsRef<str>,
|
||||||
|
f: impl for<'a> AsyncFnOnce(ExecHandle<'a>) -> R + 'static,
|
||||||
|
) -> GExpr {
|
||||||
let (cmd_snd, cmd_recv) = channel(0);
|
let (cmd_snd, cmd_recv) = channel(0);
|
||||||
let halt = async { Command::Halt(f(ExecHandle(cmd_snd, PhantomData)).await.to_gen().await) }
|
let halt = async { Command::Halt(f(ExecHandle(cmd_snd, PhantomData)).await.to_expr().await) }
|
||||||
.into_stream();
|
.into_stream();
|
||||||
let coro = BuilderCoroutine(Rc::new(BuilderCoroutineData {
|
let coro = BuilderCoroutine(Rc::new(BuilderCoroutineData {
|
||||||
|
name: Some(debug.as_ref().to_string()),
|
||||||
receiver: Mutex::new(stream::select(halt, cmd_recv).boxed_local()),
|
receiver: Mutex::new(stream::select(halt, cmd_recv).boxed_local()),
|
||||||
}));
|
}));
|
||||||
coro.run().await
|
coro.run().await
|
||||||
@@ -81,12 +93,12 @@ pub struct ExecHandle<'a>(Sender<Command>, PhantomData<&'a ()>);
|
|||||||
impl ExecHandle<'_> {
|
impl ExecHandle<'_> {
|
||||||
pub async fn exec<T: TryFromExpr>(&mut self, val: impl ToExpr) -> OrcRes<T> {
|
pub async fn exec<T: TryFromExpr>(&mut self, val: impl ToExpr) -> OrcRes<T> {
|
||||||
let (reply_snd, mut reply_recv) = channel(1);
|
let (reply_snd, mut reply_recv) = channel(1);
|
||||||
self.0.send(Command::Execute(val.to_gen().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
|
self.0.send(Command::Execute(val.to_expr().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
|
||||||
T::try_from_expr(reply_recv.next().await.expect(WEIRD_DROP_ERR)).await
|
T::try_from_expr(reply_recv.next().await.expect(WEIRD_DROP_ERR)).await
|
||||||
}
|
}
|
||||||
pub async fn register(&mut self, val: impl ToExpr) -> Expr {
|
pub async fn register(&mut self, val: impl ToExpr) -> Expr {
|
||||||
let (reply_snd, mut reply_recv) = channel(1);
|
let (reply_snd, mut reply_recv) = channel(1);
|
||||||
self.0.send(Command::Register(val.to_gen().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
|
self.0.send(Command::Register(val.to_expr().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
|
||||||
reply_recv.next().await.expect(WEIRD_DROP_ERR)
|
reply_recv.next().await.expect(WEIRD_DROP_ERR)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ use futures::channel::mpsc::{Receiver, Sender, channel};
|
|||||||
use futures::future::{LocalBoxFuture, join_all};
|
use futures::future::{LocalBoxFuture, join_all};
|
||||||
use futures::lock::Mutex;
|
use futures::lock::Mutex;
|
||||||
use futures::{FutureExt, SinkExt, StreamExt, stream, stream_select};
|
use futures::{FutureExt, SinkExt, StreamExt, stream, stream_select};
|
||||||
use futures_locks::RwLock;
|
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_api_traits::{Decode, UnderRoot, enc_vec};
|
use orchid_api_traits::{Decode, UnderRoot, enc_vec};
|
||||||
@@ -29,11 +28,10 @@ use trait_set::trait_set;
|
|||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId};
|
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId};
|
||||||
use crate::atom_owned::take_atom;
|
use crate::atom_owned::take_atom;
|
||||||
use crate::context::{SysCtx, ctx, i, with_ctx};
|
|
||||||
use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
|
use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
|
||||||
use crate::lexer::{LexContext, ekey_cascade, ekey_not_applicable};
|
use crate::lexer::{LexContext, ekey_cascade, ekey_not_applicable};
|
||||||
use crate::parser::{PTokTree, ParsCtx, get_const, linev_into_api};
|
use crate::parser::{PTokTree, ParsCtx, get_const, linev_into_api};
|
||||||
use crate::system::atom_by_idx;
|
use crate::system::{SysCtx, atom_by_idx};
|
||||||
use crate::system_ctor::{CtedObj, DynSystemCtor};
|
use crate::system_ctor::{CtedObj, DynSystemCtor};
|
||||||
use crate::tree::{LazyMemberFactory, TreeIntoApiCtxImpl};
|
use crate::tree::{LazyMemberFactory, TreeIntoApiCtxImpl};
|
||||||
|
|
||||||
@@ -51,18 +49,19 @@ impl ExtensionData {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub enum MemberRecord {
|
pub enum MemberRecord {
|
||||||
Gen(Vec<Tok<String>>, LazyMemberFactory),
|
Gen(Vec<IStr>, LazyMemberFactory),
|
||||||
Res,
|
Res,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct SystemRecord {
|
pub struct SystemRecord {
|
||||||
lazy_members: Mutex<HashMap<api::TreeId, MemberRecord>>,
|
lazy_members: HashMap<api::TreeId, MemberRecord>,
|
||||||
ctx: SysCtx,
|
ctx: SysCtx,
|
||||||
}
|
}
|
||||||
|
|
||||||
trait_set! {
|
trait_set! {
|
||||||
pub trait WithAtomRecordCallback<'a, T> = AsyncFnOnce(
|
pub trait WithAtomRecordCallback<'a, T> = AsyncFnOnce(
|
||||||
Box<dyn AtomDynfo>,
|
Box<dyn AtomDynfo>,
|
||||||
|
SysCtx,
|
||||||
AtomTypeId,
|
AtomTypeId,
|
||||||
&'a [u8]
|
&'a [u8]
|
||||||
) -> T
|
) -> T
|
||||||
@@ -73,17 +72,17 @@ pub async fn with_atom_record<'a, F: Future<Output = SysCtx>, T>(
|
|||||||
atom: &'a api::Atom,
|
atom: &'a api::Atom,
|
||||||
cb: impl WithAtomRecordCallback<'a, T>,
|
cb: impl WithAtomRecordCallback<'a, T>,
|
||||||
) -> T {
|
) -> T {
|
||||||
let mut data = &atom.data.0[..];
|
let mut data = &atom.data[..];
|
||||||
let ctx = get_sys_ctx(atom.owner).await;
|
let ctx = get_sys_ctx(atom.owner).await;
|
||||||
let inst = ctx.get::<CtedObj>().inst();
|
let inst = ctx.get::<CtedObj>().inst();
|
||||||
let id = AtomTypeId::decode(Pin::new(&mut data)).await;
|
let id = AtomTypeId::decode(Pin::new(&mut data)).await;
|
||||||
let atom_record = atom_by_idx(inst.card(), id.clone()).expect("Atom ID reserved");
|
let atom_record = atom_by_idx(inst.card(), id.clone()).expect("Atom ID reserved");
|
||||||
with_ctx(ctx, async move { cb(atom_record, id, data).await }).await
|
cb(atom_record, ctx, id, data).await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ExtensionOwner {
|
pub struct ExtensionOwner {
|
||||||
_interner_cell: Rc<RefCell<Option<Interner>>>,
|
_interner_cell: Rc<RefCell<Option<Interner>>>,
|
||||||
_systems_lock: Rc<RwLock<HashMap<api::SysId, SystemRecord>>>,
|
_systems_lock: Rc<Mutex<HashMap<api::SysId, SystemRecord>>>,
|
||||||
out_recv: Mutex<Receiver<Vec<u8>>>,
|
out_recv: Mutex<Receiver<Vec<u8>>>,
|
||||||
out_send: Sender<Vec<u8>>,
|
out_send: Sender<Vec<u8>>,
|
||||||
}
|
}
|
||||||
@@ -107,7 +106,7 @@ pub fn extension_init(
|
|||||||
.map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys))
|
.map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys))
|
||||||
.map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap())))
|
.map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap())))
|
||||||
.collect_vec();
|
.collect_vec();
|
||||||
let systems_lock = Rc::new(RwLock::new(HashMap::<api::SysId, SystemRecord>::new()));
|
let systems_lock = Rc::new(Mutex::new(HashMap::<api::SysId, SystemRecord>::new()));
|
||||||
let ext_header = api::ExtensionHeader { name: data.name.to_string(), systems: decls.clone() };
|
let ext_header = api::ExtensionHeader { name: data.name.to_string(), systems: decls.clone() };
|
||||||
let (out_send, in_recv) = channel::<Vec<u8>>(1);
|
let (out_send, in_recv) = channel::<Vec<u8>>(1);
|
||||||
let (in_send, out_recv) = channel::<Vec<u8>>(1);
|
let (in_send, out_recv) = channel::<Vec<u8>>(1);
|
||||||
@@ -120,7 +119,7 @@ pub fn extension_init(
|
|||||||
let get_ctx = clone!(systems_weak; move |id: api::SysId| clone!(systems_weak; async move {
|
let get_ctx = clone!(systems_weak; move |id: api::SysId| clone!(systems_weak; async move {
|
||||||
let systems =
|
let systems =
|
||||||
systems_weak.upgrade().expect("System table dropped before request processing done");
|
systems_weak.upgrade().expect("System table dropped before request processing done");
|
||||||
systems.read().await.get(&id).expect("System not found").ctx.clone()
|
systems.lock().await.get(&id).expect("System not found").ctx.clone()
|
||||||
}));
|
}));
|
||||||
let init_ctx = {
|
let init_ctx = {
|
||||||
clone!(interner_weak, spawner, logger);
|
clone!(interner_weak, spawner, logger);
|
||||||
@@ -140,12 +139,20 @@ pub fn extension_init(
|
|||||||
Box::pin(async move { in_send.send(a.to_vec()).await.unwrap() })
|
Box::pin(async move { in_send.send(a.to_vec()).await.unwrap() })
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
clone!(exit_send);
|
clone!(systems_weak, exit_send, get_ctx);
|
||||||
move |n, _| {
|
move |n, _| {
|
||||||
clone!(exit_send mut);
|
clone!(systems_weak, exit_send mut, get_ctx);
|
||||||
async move {
|
async move {
|
||||||
match n {
|
match n {
|
||||||
api::HostExtNotif::Exit => exit_send.send(()).await.unwrap(),
|
api::HostExtNotif::Exit => exit_send.send(()).await.unwrap(),
|
||||||
|
api::HostExtNotif::SystemDrop(api::SystemDrop(sys_id)) =>
|
||||||
|
if let Some(rc) = systems_weak.upgrade() {
|
||||||
|
mem::drop(rc.lock().await.remove(&sys_id))
|
||||||
|
},
|
||||||
|
api::HostExtNotif::AtomDrop(api::AtomDrop(sys_id, atom)) => {
|
||||||
|
let ctx = get_ctx(sys_id).await;
|
||||||
|
take_atom(atom, &ctx).await.dyn_free(ctx.clone()).await
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
.boxed_local()
|
.boxed_local()
|
||||||
@@ -157,111 +164,98 @@ pub fn extension_init(
|
|||||||
clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger);
|
clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger);
|
||||||
async move {
|
async move {
|
||||||
let interner_cell = interner_weak.upgrade().expect("Interner dropped before request");
|
let interner_cell = interner_weak.upgrade().expect("Interner dropped before request");
|
||||||
let interner =
|
let i = interner_cell.borrow().clone().expect("Request arrived before interner set");
|
||||||
interner_cell.borrow().clone().expect("Request arrived before interner set");
|
|
||||||
if !matches!(req, api::HostExtReq::AtomReq(api::AtomReq::AtomPrint(_))) {
|
|
||||||
writeln!(msg_logger, "{} extension received request {req:?}", data.name);
|
writeln!(msg_logger, "{} extension received request {req:?}", data.name);
|
||||||
}
|
|
||||||
|
|
||||||
match req {
|
match req {
|
||||||
api::HostExtReq::SystemDrop(sys_drop) => {
|
|
||||||
if let Some(rc) = systems_weak.upgrade() {
|
|
||||||
mem::drop(rc.write().await.remove(&sys_drop.0))
|
|
||||||
}
|
|
||||||
hand.handle(&sys_drop, &()).await
|
|
||||||
},
|
|
||||||
api::HostExtReq::AtomDrop(atom_drop @ api::AtomDrop(sys_id, atom)) =>
|
|
||||||
with_ctx(get_ctx(sys_id).await, async move {
|
|
||||||
take_atom(atom).await.dyn_free().await;
|
|
||||||
hand.handle(&atom_drop, &()).await
|
|
||||||
})
|
|
||||||
.await,
|
|
||||||
api::HostExtReq::Ping(ping @ api::Ping) => hand.handle(&ping, &()).await,
|
api::HostExtReq::Ping(ping @ api::Ping) => hand.handle(&ping, &()).await,
|
||||||
api::HostExtReq::Sweep(sweep @ api::Sweep) =>
|
api::HostExtReq::Sweep(sweep @ api::Sweep) =>
|
||||||
hand.handle(&sweep, &interner.sweep_replica().await).await,
|
hand.handle(&sweep, &i.sweep_replica().await).await,
|
||||||
api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => {
|
api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => {
|
||||||
let (sys_id, _) = (decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system))
|
let (sys_id, _) = (decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system))
|
||||||
.expect("NewSystem call received for invalid system");
|
.expect("NewSystem call received for invalid system");
|
||||||
let cted = data.systems[sys_id].new_system(&new_sys);
|
let cted = data.systems[sys_id].new_system(&new_sys);
|
||||||
with_ctx(init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await, async move {
|
|
||||||
let lex_filter =
|
let lex_filter =
|
||||||
cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
|
cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
|
||||||
char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned()))
|
char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned()))
|
||||||
});
|
});
|
||||||
let lazy_members = Mutex::new(HashMap::new());
|
let lazy_mems = Mutex::new(HashMap::new());
|
||||||
let const_root = stream::iter(cted.inst().dyn_env().await)
|
let ctx = init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await;
|
||||||
|
let const_root = stream::iter(cted.inst().dyn_env())
|
||||||
.then(|mem| {
|
.then(|mem| {
|
||||||
let lazy_mems = &lazy_members;
|
let (req, lazy_mems) = (&hand, &lazy_mems);
|
||||||
async move {
|
clone!(i, ctx; async move {
|
||||||
let name = i().i(&mem.name).await;
|
|
||||||
let mut tia_ctx = TreeIntoApiCtxImpl {
|
let mut tia_ctx = TreeIntoApiCtxImpl {
|
||||||
lazy_members: &mut *lazy_mems.lock().await,
|
lazy_members: &mut *lazy_mems.lock().await,
|
||||||
|
sys: ctx,
|
||||||
basepath: &[],
|
basepath: &[],
|
||||||
path: Substack::Bottom.push(name.clone()),
|
path: Substack::Bottom,
|
||||||
|
req
|
||||||
};
|
};
|
||||||
(name.to_api(), mem.kind.into_api(&mut tia_ctx).await)
|
(i.i(&mem.name).await.to_api(), mem.kind.into_api(&mut tia_ctx).await)
|
||||||
}
|
})
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
.await;
|
.await;
|
||||||
let prelude =
|
let prelude =
|
||||||
cted.inst().dyn_prelude().await.iter().map(|sym| sym.to_api()).collect();
|
cted.inst().dyn_prelude(&i).await.iter().map(|sym| sym.to_api()).collect();
|
||||||
let record = SystemRecord { ctx: ctx(), lazy_members };
|
let record = SystemRecord { ctx, lazy_members: lazy_mems.into_inner() };
|
||||||
let systems = systems_weak.upgrade().expect("System constructed during shutdown");
|
let systems = systems_weak.upgrade().expect("System constructed during shutdown");
|
||||||
systems.write().await.insert(new_sys.id, record);
|
systems.lock().await.insert(new_sys.id, record);
|
||||||
let line_types = join_all(
|
let line_types = join_all(
|
||||||
(cted.inst().dyn_parsers().iter())
|
(cted.inst().dyn_parsers().iter())
|
||||||
.map(|p| async { interner.i(p.line_head()).await.to_api() }),
|
.map(|p| async { i.i(p.line_head()).await.to_api() }),
|
||||||
)
|
)
|
||||||
.await;
|
.await;
|
||||||
let response =
|
let response = api::NewSystemResponse { lex_filter, const_root, line_types, prelude };
|
||||||
api::NewSystemResponse { lex_filter, const_root, line_types, prelude };
|
|
||||||
hand.handle(&new_sys, &response).await
|
hand.handle(&new_sys, &response).await
|
||||||
})
|
|
||||||
.await
|
|
||||||
},
|
},
|
||||||
api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) =>
|
api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => {
|
||||||
with_ctx(get_ctx(sys_id).await, async move {
|
let sys_ctx = get_ctx(sys_id).await;
|
||||||
let systems = systems_weak.upgrade().expect("Member queried during shutdown");
|
let systems = systems_weak.upgrade().expect("Member queried during shutdown");
|
||||||
let systems_g = systems.read().await;
|
let mut systems_g = systems.lock().await;
|
||||||
let mut lazy_members =
|
let SystemRecord { lazy_members, .. } =
|
||||||
systems_g.get(&sys_id).expect("System not found").lazy_members.lock().await;
|
systems_g.get_mut(&sys_id).expect("System not found");
|
||||||
let (path, cb) = match lazy_members.insert(tree_id, MemberRecord::Res) {
|
let (path, cb) = match lazy_members.insert(tree_id, MemberRecord::Res) {
|
||||||
None => panic!("Tree for ID not found"),
|
None => panic!("Tree for ID not found"),
|
||||||
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
|
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
|
||||||
Some(MemberRecord::Gen(path, cb)) => (path, cb),
|
Some(MemberRecord::Gen(path, cb)) => (path, cb),
|
||||||
};
|
};
|
||||||
let tree = cb.build(Sym::new(path.clone(), &interner).await.unwrap()).await;
|
let tree = cb.build(Sym::new(path.clone(), &i).await.unwrap(), sys_ctx.clone()).await;
|
||||||
let mut tia_ctx = TreeIntoApiCtxImpl {
|
let mut tia_ctx = TreeIntoApiCtxImpl {
|
||||||
|
sys: sys_ctx,
|
||||||
path: Substack::Bottom,
|
path: Substack::Bottom,
|
||||||
basepath: &path,
|
basepath: &path,
|
||||||
lazy_members: &mut lazy_members,
|
lazy_members,
|
||||||
|
req: &hand,
|
||||||
};
|
};
|
||||||
hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
|
hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
|
||||||
})
|
},
|
||||||
.await,
|
|
||||||
api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
|
api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
|
||||||
let api::SysFwded(sys_id, payload) = fwd;
|
let api::SysFwded(sys_id, payload) = fwd;
|
||||||
let ctx = get_ctx(sys_id).await;
|
let ctx = get_ctx(sys_id).await;
|
||||||
with_ctx(ctx.clone(), async move {
|
|
||||||
let sys = ctx.cted().inst();
|
let sys = ctx.cted().inst();
|
||||||
sys.dyn_request(hand, payload).await
|
sys.dyn_request(hand, payload).await
|
||||||
})
|
|
||||||
.await
|
|
||||||
},
|
},
|
||||||
api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) =>
|
api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) => {
|
||||||
with_ctx(get_ctx(sys).await, async move {
|
let sys_ctx = get_ctx(sys).await;
|
||||||
let text = Tok::from_api(text, &i()).await;
|
let text = Tok::from_api(text, &i).await;
|
||||||
let src = Sym::from_api(src, &i()).await;
|
let src = Sym::from_api(src, sys_ctx.i()).await;
|
||||||
let rep = Reporter::new();
|
let rep = Reporter::new();
|
||||||
let expr_store = BorrowedExprStore::new();
|
let expr_store = BorrowedExprStore::new();
|
||||||
let trigger_char = text.chars().nth(pos as usize).unwrap();
|
let trigger_char = text.chars().nth(pos as usize).unwrap();
|
||||||
let ekey_na = ekey_not_applicable().await;
|
let ekey_na = ekey_not_applicable(&i).await;
|
||||||
let ekey_cascade = ekey_cascade().await;
|
let ekey_cascade = ekey_cascade(&i).await;
|
||||||
let lexers = ctx().cted().inst().dyn_lexers();
|
let lexers = sys_ctx.cted().inst().dyn_lexers();
|
||||||
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char))
|
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) {
|
||||||
{
|
let ctx = LexContext {
|
||||||
let ctx = LexContext::new(&expr_store, &text, id, pos, src.clone(), &rep);
|
id,
|
||||||
|
pos,
|
||||||
|
text: &text,
|
||||||
|
src: src.clone(),
|
||||||
|
ctx: sys_ctx.clone(),
|
||||||
|
rep: &rep,
|
||||||
|
exprs: &expr_store,
|
||||||
|
};
|
||||||
match lx.lex(&text[pos as usize..], &ctx).await {
|
match lx.lex(&text[pos as usize..], &ctx).await {
|
||||||
Err(e) if e.any(|e| *e == ekey_na) => continue,
|
Err(e) if e.any(|e| *e == ekey_na) => continue,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
@@ -270,7 +264,7 @@ pub fn extension_init(
|
|||||||
return hand.handle(&lex, &eopt).await;
|
return hand.handle(&lex, &eopt).await;
|
||||||
},
|
},
|
||||||
Ok((s, expr)) => {
|
Ok((s, expr)) => {
|
||||||
let expr = expr.into_api(&mut (), &mut ()).await;
|
let expr = expr.into_api(&mut (), &mut (sys_ctx, &hand)).await;
|
||||||
let pos = (text.len() - s.len()) as u32;
|
let pos = (text.len() - s.len()) as u32;
|
||||||
expr_store.dispose().await;
|
expr_store.dispose().await;
|
||||||
return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await;
|
return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await;
|
||||||
@@ -280,46 +274,42 @@ pub fn extension_init(
|
|||||||
writeln!(logger, "Got notified about n/a character '{trigger_char}'");
|
writeln!(logger, "Got notified about n/a character '{trigger_char}'");
|
||||||
expr_store.dispose().await;
|
expr_store.dispose().await;
|
||||||
hand.handle(&lex, &None).await
|
hand.handle(&lex, &None).await
|
||||||
})
|
},
|
||||||
.await,
|
|
||||||
api::HostExtReq::ParseLine(pline) => {
|
api::HostExtReq::ParseLine(pline) => {
|
||||||
let api::ParseLine { module, src, exported, comments, sys, line, idx } = &pline;
|
let api::ParseLine { module, src, exported, comments, sys, line, idx } = &pline;
|
||||||
with_ctx(get_ctx(*sys).await, async {
|
let ctx = get_ctx(*sys).await;
|
||||||
let parsers = ctx().cted().inst().dyn_parsers();
|
let parsers = ctx.cted().inst().dyn_parsers();
|
||||||
let src = Sym::from_api(*src, &i()).await;
|
let src = Sym::from_api(*src, ctx.i()).await;
|
||||||
let comments =
|
let comments =
|
||||||
join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &interner)))
|
join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &i))).await;
|
||||||
.await;
|
|
||||||
let expr_store = BorrowedExprStore::new();
|
let expr_store = BorrowedExprStore::new();
|
||||||
|
let mut from_api_ctx = (ctx.clone(), &expr_store);
|
||||||
let line: Vec<PTokTree> =
|
let line: Vec<PTokTree> =
|
||||||
ttv_from_api(line, &mut &expr_store, &mut (), &src, &i()).await;
|
ttv_from_api(line, &mut from_api_ctx, &mut (), &src, &i).await;
|
||||||
let snip = Snippet::new(line.first().expect("Empty line"), &line);
|
let snip = Snippet::new(line.first().expect("Empty line"), &line);
|
||||||
let parser = parsers[*idx as usize];
|
let parser = parsers[*idx as usize];
|
||||||
let module = Sym::from_api(*module, &i()).await;
|
let module = Sym::from_api(*module, ctx.i()).await;
|
||||||
let reporter = Reporter::new();
|
let reporter = Reporter::new();
|
||||||
let pctx = ParsCtx::new(module, &reporter);
|
let pctx = ParsCtx::new(ctx.clone(), module, &reporter);
|
||||||
let parse_res = parser.parse(pctx, *exported, comments, snip).await;
|
let parse_res = parser.parse(pctx, *exported, comments, snip).await;
|
||||||
let o_line = match reporter.merge(parse_res) {
|
let o_line = match reporter.merge(parse_res) {
|
||||||
Err(e) => Err(e.to_api()),
|
Err(e) => Err(e.to_api()),
|
||||||
Ok(t) => Ok(linev_into_api(t).await),
|
Ok(t) => Ok(linev_into_api(t, ctx.clone(), &hand).await),
|
||||||
};
|
};
|
||||||
mem::drop(line);
|
|
||||||
expr_store.dispose().await;
|
expr_store.dispose().await;
|
||||||
hand.handle(&pline, &o_line).await
|
hand.handle(&pline, &o_line).await
|
||||||
})
|
|
||||||
.await
|
|
||||||
},
|
},
|
||||||
api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst(sys, id)) =>
|
api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst { id, sys }) => {
|
||||||
with_ctx(get_ctx(sys).await, async move {
|
let ctx = get_ctx(sys).await;
|
||||||
let cnst = get_const(id).await;
|
let cnst = get_const(id, ctx.clone()).await;
|
||||||
hand.handle(fpc, &cnst.serialize().await).await
|
hand.handle(fpc, &cnst.api_return(ctx, &hand).await).await
|
||||||
})
|
},
|
||||||
.await,
|
|
||||||
api::HostExtReq::AtomReq(atom_req) => {
|
api::HostExtReq::AtomReq(atom_req) => {
|
||||||
let atom = atom_req.get_atom();
|
let atom = atom_req.get_atom();
|
||||||
let atom_req = atom_req.clone();
|
let atom_req = atom_req.clone();
|
||||||
with_atom_record(&get_ctx, atom, async move |nfo, id, buf| {
|
with_atom_record(&get_ctx, atom, async move |nfo, ctx, id, buf| {
|
||||||
let actx = AtomCtx(buf, atom.drop);
|
let actx = AtomCtx(buf, atom.drop, ctx.clone());
|
||||||
|
|
||||||
match &atom_req {
|
match &atom_req {
|
||||||
api::AtomReq::SerializeAtom(ser) => {
|
api::AtomReq::SerializeAtom(ser) => {
|
||||||
let mut buf = enc_vec(&id).await;
|
let mut buf = enc_vec(&id).await;
|
||||||
@@ -338,7 +328,7 @@ pub fn extension_init(
|
|||||||
api::AtomReq::Fwded(fwded) => {
|
api::AtomReq::Fwded(fwded) => {
|
||||||
let api::Fwded(_, key, payload) = &fwded;
|
let api::Fwded(_, key, payload) = &fwded;
|
||||||
let mut reply = Vec::new();
|
let mut reply = Vec::new();
|
||||||
let key = Sym::from_api(*key, &interner).await;
|
let key = Sym::from_api(*key, &i).await;
|
||||||
let some = nfo
|
let some = nfo
|
||||||
.handle_req(
|
.handle_req(
|
||||||
actx,
|
actx,
|
||||||
@@ -350,20 +340,21 @@ pub fn extension_init(
|
|||||||
hand.handle(fwded, &some.then_some(reply)).await
|
hand.handle(fwded, &some.then_some(reply)).await
|
||||||
},
|
},
|
||||||
api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => {
|
api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => {
|
||||||
|
// SAFETY: function calls borrow their argument implicitly
|
||||||
let expr_store = BorrowedExprStore::new();
|
let expr_store = BorrowedExprStore::new();
|
||||||
let expr_handle = ExprHandle::borrowed(*arg, &expr_store);
|
let expr_handle = ExprHandle::borrowed(ctx.clone(), *arg, &expr_store);
|
||||||
let ret = nfo.call_ref(actx, Expr::from_handle(expr_handle.clone())).await;
|
let ret = nfo.call_ref(actx, Expr::from_handle(expr_handle.clone())).await;
|
||||||
let api_expr = ret.serialize().await;
|
expr_handle.drop_one().await;
|
||||||
mem::drop(expr_handle);
|
let api_expr = ret.api_return(ctx.clone(), &hand).await;
|
||||||
expr_store.dispose().await;
|
expr_store.dispose().await;
|
||||||
hand.handle(call, &api_expr).await
|
hand.handle(call, &api_expr).await
|
||||||
},
|
},
|
||||||
api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => {
|
api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => {
|
||||||
|
// SAFETY: function calls borrow their argument implicitly
|
||||||
let expr_store = BorrowedExprStore::new();
|
let expr_store = BorrowedExprStore::new();
|
||||||
let expr_handle = ExprHandle::borrowed(*arg, &expr_store);
|
let expr_handle = ExprHandle::borrowed(ctx.clone(), *arg, &expr_store);
|
||||||
let ret = nfo.call(actx, Expr::from_handle(expr_handle.clone())).await;
|
let ret = nfo.call(actx, Expr::from_handle(expr_handle.clone())).await;
|
||||||
let api_expr = ret.serialize().await;
|
let api_expr = ret.api_return(ctx.clone(), &hand).await;
|
||||||
mem::drop(expr_handle);
|
|
||||||
expr_store.dispose().await;
|
expr_store.dispose().await;
|
||||||
hand.handle(call, &api_expr).await
|
hand.handle(call, &api_expr).await
|
||||||
},
|
},
|
||||||
@@ -372,7 +363,7 @@ pub fn extension_init(
|
|||||||
Ok(opt) => match opt {
|
Ok(opt) => match opt {
|
||||||
None => hand.handle(cmd, &Ok(api::NextStep::Halt)).await,
|
None => hand.handle(cmd, &Ok(api::NextStep::Halt)).await,
|
||||||
Some(cont) => {
|
Some(cont) => {
|
||||||
let cont = cont.serialize().await;
|
let cont = cont.api_return(ctx.clone(), &hand).await;
|
||||||
hand.handle(cmd, &Ok(api::NextStep::Continue(cont))).await
|
hand.handle(cmd, &Ok(api::NextStep::Continue(cont))).await
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -387,12 +378,12 @@ pub fn extension_init(
|
|||||||
let ctx = get_ctx(*sys).await;
|
let ctx = get_ctx(*sys).await;
|
||||||
// SAFETY: deserialization implicitly grants ownership to previously owned exprs
|
// SAFETY: deserialization implicitly grants ownership to previously owned exprs
|
||||||
let refs = (refs.iter())
|
let refs = (refs.iter())
|
||||||
.map(|tk| Expr::from_handle(ExprHandle::deserialize(*tk)))
|
.map(|tk| Expr::from_handle(ExprHandle::deserialize(ctx.clone(), *tk)))
|
||||||
.collect_vec();
|
.collect_vec();
|
||||||
let id = AtomTypeId::decode(Pin::new(&mut read)).await;
|
let id = AtomTypeId::decode(Pin::new(&mut read)).await;
|
||||||
let inst = ctx.cted().inst();
|
let inst = ctx.cted().inst();
|
||||||
let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID");
|
let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID");
|
||||||
hand.handle(&deser, &nfo.deserialize(read, &refs).await).await
|
hand.handle(&deser, &nfo.deserialize(ctx.clone(), read, &refs).await).await
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ use orchid_base::reqnot::Requester;
|
|||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::atom::ForeignAtom;
|
use crate::atom::ForeignAtom;
|
||||||
use crate::context::{ctx, i};
|
|
||||||
use crate::gen_expr::{GExpr, GExprKind};
|
use crate::gen_expr::{GExpr, GExprKind};
|
||||||
|
use crate::system::SysCtx;
|
||||||
|
|
||||||
pub struct BorrowedExprStore(RefCell<Option<HashSet<Rc<ExprHandle>>>>);
|
pub struct BorrowedExprStore(RefCell<Option<HashSet<Rc<ExprHandle>>>>);
|
||||||
impl BorrowedExprStore {
|
impl BorrowedExprStore {
|
||||||
@@ -22,7 +22,7 @@ impl BorrowedExprStore {
|
|||||||
pub async fn dispose(self) {
|
pub async fn dispose(self) {
|
||||||
let elements = self.0.borrow_mut().take().unwrap();
|
let elements = self.0.borrow_mut().take().unwrap();
|
||||||
for handle in elements {
|
for handle in elements {
|
||||||
handle.on_borrow_expire().await
|
handle.drop_one().await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -34,67 +34,54 @@ impl Drop for BorrowedExprStore {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(destructure, PartialEq, Eq, Hash)]
|
#[derive(destructure)]
|
||||||
pub struct ExprHandle(api::ExprTicket);
|
pub struct ExprHandle {
|
||||||
|
pub tk: api::ExprTicket,
|
||||||
|
pub ctx: SysCtx,
|
||||||
|
}
|
||||||
impl ExprHandle {
|
impl ExprHandle {
|
||||||
/// Do not signal to take ownership of the expr. Instead, the
|
/// This function does not signal to take ownership of the expr.
|
||||||
/// [BorrowedExprStore] signifies the lifetime of the borrow, and when it is
|
pub fn borrowed(ctx: SysCtx, tk: api::ExprTicket, store: &BorrowedExprStore) -> Rc<Self> {
|
||||||
/// freed, it signals to take ownership of any exprs that ended up outliving
|
let this = Rc::new(Self { ctx, tk });
|
||||||
/// it. It is used to receive exprs sent via [ExprHandle::ticket] as an
|
|
||||||
/// optimization over [ExprHandle::from_ticket]
|
|
||||||
pub fn borrowed(tk: api::ExprTicket, store: &BorrowedExprStore) -> Rc<Self> {
|
|
||||||
let this = Rc::new(Self(tk));
|
|
||||||
store.0.borrow_mut().as_mut().unwrap().insert(this.clone());
|
store.0.borrow_mut().as_mut().unwrap().insert(this.clone());
|
||||||
this
|
this
|
||||||
}
|
}
|
||||||
/// This function takes over the loose reference pre-created via
|
pub fn deserialize(ctx: SysCtx, tk: api::ExprTicket) -> Rc<Self> { Rc::new(Self { ctx, tk }) }
|
||||||
/// [ExprHandle::serialize] in the sender. It must therefore pair up with a
|
pub fn get_ctx(&self) -> SysCtx { self.ctx.clone() }
|
||||||
/// corresponding call to that function.
|
/// Drop one instance of the handle silently; if it's the last one, do
|
||||||
pub fn deserialize(tk: api::ExprTicket) -> Rc<Self> { Rc::new(Self(tk)) }
|
/// nothing, otherwise send an Acquire
|
||||||
/// This function takes ownership of a borrowed expr sent via
|
pub async fn drop_one(self: Rc<Self>) {
|
||||||
/// [ExprHandle::ticket] and signals immediately to record that ownership. It
|
if let Err(rc) = Rc::try_unwrap(self) {
|
||||||
/// is used in place of [ExprHandle::borrowed] when it's impractical to
|
rc.ctx.reqnot().notify(api::Acquire(rc.ctx.sys_id(), rc.tk)).await
|
||||||
/// determine how long the borrow will live.
|
}
|
||||||
///
|
|
||||||
/// # Safety
|
|
||||||
///
|
|
||||||
/// You need to ensure that the [api::Acquire] sent by this function arrives
|
|
||||||
/// before the borrow expires, so you still need a borrow delimited by some
|
|
||||||
/// message you will send in the future.
|
|
||||||
pub async fn from_ticket(tk: api::ExprTicket) -> Rc<Self> {
|
|
||||||
let store = BorrowedExprStore::new();
|
|
||||||
let expr = Self::borrowed(tk, &store);
|
|
||||||
store.dispose().await;
|
|
||||||
expr
|
|
||||||
}
|
}
|
||||||
/// The raw ticket used in messages. If you want to transfer ownership via the
|
|
||||||
/// ticket, you should use [ExprHandle::serialize]. Only send this if you want
|
|
||||||
/// to lend the expr, and you expect the receiver to use
|
|
||||||
/// [ExprHandle::borrowed] or [ExprHandle::from_ticket]
|
|
||||||
pub fn ticket(&self) -> api::ExprTicket { self.0 }
|
|
||||||
async fn send_acq(&self) { ctx().reqnot().notify(api::Acquire(ctx().sys_id(), self.0)).await }
|
|
||||||
/// If this is the last one reference, do nothing, otherwise send an Acquire
|
|
||||||
pub async fn on_borrow_expire(self: Rc<Self>) { self.serialize().await; }
|
|
||||||
/// Drop the handle and get the ticket without a release notification.
|
/// Drop the handle and get the ticket without a release notification.
|
||||||
/// Use this with messages that imply ownership transfer. This function is
|
/// Use this with messages that imply ownership transfer. This function is
|
||||||
/// safe because abusing it is a memory leak.
|
/// safe because abusing it is a memory leak.
|
||||||
pub async fn serialize(self: Rc<Self>) -> api::ExprTicket {
|
pub fn serialize(self) -> api::ExprTicket { self.destructure().0 }
|
||||||
match Rc::try_unwrap(self) {
|
|
||||||
Err(rc) => {
|
|
||||||
rc.send_acq().await;
|
|
||||||
rc.0
|
|
||||||
},
|
|
||||||
Ok(hand) => hand.destructure().0,
|
|
||||||
}
|
}
|
||||||
|
impl Eq for ExprHandle {}
|
||||||
|
impl PartialEq for ExprHandle {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
self.ctx.sys_id() == other.ctx.sys_id() && self.tk == other.tk
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Hash for ExprHandle {
|
||||||
|
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||||
|
self.ctx.sys_id().hash(state);
|
||||||
|
self.tk.hash(state);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl fmt::Debug for ExprHandle {
|
impl fmt::Debug for ExprHandle {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ExprHandle({})", self.0.0) }
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(f, "ExprHandle({})", self.tk.0)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
impl Drop for ExprHandle {
|
impl Drop for ExprHandle {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
let notif = api::Release(ctx().sys_id(), self.0);
|
let notif = api::Release(self.ctx.sys_id(), self.tk);
|
||||||
ctx().spawn(async move { ctx().reqnot().clone().notify(notif).await })
|
let reqnot = self.ctx.reqnot().clone();
|
||||||
|
self.ctx.spawner()(Box::pin(async move { reqnot.notify(notif).await }))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -105,23 +92,19 @@ pub struct Expr {
|
|||||||
}
|
}
|
||||||
impl Expr {
|
impl Expr {
|
||||||
pub fn from_handle(handle: Rc<ExprHandle>) -> Self { Self { handle, data: Rc::default() } }
|
pub fn from_handle(handle: Rc<ExprHandle>) -> Self { Self { handle, data: Rc::default() } }
|
||||||
pub fn from_data(handle: Rc<ExprHandle>, d: ExprData) -> Self {
|
pub fn new(handle: Rc<ExprHandle>, d: ExprData) -> Self {
|
||||||
Self { handle, data: Rc::new(OnceCell::from(d)) }
|
Self { handle, data: Rc::new(OnceCell::from(d)) }
|
||||||
}
|
}
|
||||||
/// Creates an instance without incrementing the reference count. This is
|
|
||||||
/// only safe to be called on a reference created with an [Expr::serialize]
|
|
||||||
/// call which created the loose reference it can take ownership of.
|
|
||||||
pub async fn deserialize(tk: api::ExprTicket) -> Self {
|
|
||||||
Self::from_handle(ExprHandle::deserialize(tk))
|
|
||||||
}
|
|
||||||
pub async fn data(&self) -> &ExprData {
|
pub async fn data(&self) -> &ExprData {
|
||||||
(self.data.get_or_init(async {
|
(self.data.get_or_init(async {
|
||||||
let details = ctx().reqnot().request(api::Inspect { target: self.handle.ticket() }).await;
|
let details = self.handle.ctx.reqnot().request(api::Inspect { target: self.handle.tk }).await;
|
||||||
let pos = Pos::from_api(&details.location, &i()).await;
|
let pos = Pos::from_api(&details.location, self.handle.ctx.i()).await;
|
||||||
let kind = match details.kind {
|
let kind = match details.kind {
|
||||||
api::InspectedKind::Atom(a) =>
|
api::InspectedKind::Atom(a) =>
|
||||||
ExprKind::Atom(ForeignAtom::new(self.handle.clone(), a, pos.clone())),
|
ExprKind::Atom(ForeignAtom::new(self.handle.clone(), a, pos.clone())),
|
||||||
api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b, &i()).await),
|
api::InspectedKind::Bottom(b) =>
|
||||||
|
ExprKind::Bottom(OrcErrv::from_api(&b, self.handle.ctx.i()).await),
|
||||||
api::InspectedKind::Opaque => ExprKind::Opaque,
|
api::InspectedKind::Opaque => ExprKind::Opaque,
|
||||||
};
|
};
|
||||||
ExprData { pos, kind }
|
ExprData { pos, kind }
|
||||||
@@ -135,22 +118,20 @@ impl Expr {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn handle(&self) -> Rc<ExprHandle> { self.handle.clone() }
|
pub fn handle(&self) -> Rc<ExprHandle> { self.handle.clone() }
|
||||||
|
pub fn ctx(&self) -> SysCtx { self.handle.ctx.clone() }
|
||||||
|
|
||||||
pub fn slot(&self) -> GExpr {
|
pub fn slot(&self) -> GExpr {
|
||||||
GExpr { pos: Pos::SlotTarget, kind: GExprKind::Slot(self.clone()) }
|
GExpr { pos: Pos::SlotTarget, kind: GExprKind::Slot(self.clone()) }
|
||||||
}
|
}
|
||||||
/// Increments the refcount to ensure that the ticket remains valid even if
|
|
||||||
/// the handle is freed. To avoid a leak, [Expr::deserialize] must eventually
|
|
||||||
/// be called.
|
|
||||||
pub async fn serialize(self) -> api::ExprTicket { self.handle.serialize().await }
|
|
||||||
}
|
}
|
||||||
impl Format for Expr {
|
impl Format for Expr {
|
||||||
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
match &self.data().await.kind {
|
match &self.data().await.kind {
|
||||||
ExprKind::Opaque => "OPAQUE".to_string().into(),
|
ExprKind::Opaque => "OPAQUE".to_string().into(),
|
||||||
ExprKind::Bottom(b) => format!("Bottom({b})").into(),
|
ExprKind::Bottom(b) => format!("Bottom({b})").into(),
|
||||||
ExprKind::Atom(a) =>
|
ExprKind::Atom(a) => FmtUnit::from_api(
|
||||||
FmtUnit::from_api(&ctx().reqnot().request(api::ExtAtomPrint(a.atom.clone())).await),
|
&self.handle.ctx.reqnot().request(api::ExtAtomPrint(a.atom.clone())).await,
|
||||||
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -19,11 +19,11 @@ use trait_set::trait_set;
|
|||||||
|
|
||||||
use crate::atom::Atomic;
|
use crate::atom::Atomic;
|
||||||
use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
|
use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
|
||||||
use crate::context::{SysCtxEntry, ctx, i};
|
|
||||||
use crate::conv::ToExpr;
|
use crate::conv::ToExpr;
|
||||||
use crate::coroutine_exec::{ExecHandle, exec};
|
use crate::coroutine_exec::{ExecHandle, exec};
|
||||||
use crate::expr::Expr;
|
use crate::expr::Expr;
|
||||||
use crate::gen_expr::GExpr;
|
use crate::gen_expr::GExpr;
|
||||||
|
use crate::system::{SysCtx, SysCtxEntry};
|
||||||
|
|
||||||
trait_set! {
|
trait_set! {
|
||||||
trait FunCB = Fn(Vec<Expr>) -> LocalBoxFuture<'static, OrcRes<GExpr>> + 'static;
|
trait FunCB = Fn(Vec<Expr>) -> LocalBoxFuture<'static, OrcRes<GExpr>> + 'static;
|
||||||
@@ -43,11 +43,14 @@ struct FunRecord {
|
|||||||
fun: Rc<dyn FunCB>,
|
fun: Rc<dyn FunCB>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn process_args<I, O, F: ExprFunc<I, O>>(f: F) -> FunRecord {
|
async fn process_args<I, O, F: ExprFunc<I, O>>(
|
||||||
|
debug: impl AsRef<str> + Clone + 'static,
|
||||||
|
f: F,
|
||||||
|
) -> FunRecord {
|
||||||
let argtyps = F::argtyps();
|
let argtyps = F::argtyps();
|
||||||
let fun = Rc::new(move |v: Vec<Expr>| {
|
let fun = Rc::new(move |v: Vec<Expr>| {
|
||||||
clone!(f, v mut);
|
clone!(f, v mut);
|
||||||
exec(async move |mut hand| {
|
exec(debug.clone(), async move |mut hand| {
|
||||||
let mut norm_args = Vec::with_capacity(v.len());
|
let mut norm_args = Vec::with_capacity(v.len());
|
||||||
for (expr, typ) in v.into_iter().zip(argtyps) {
|
for (expr, typ) in v.into_iter().zip(argtyps) {
|
||||||
if *typ != TypeId::of::<Expr>() {
|
if *typ != TypeId::of::<Expr>() {
|
||||||
@@ -74,14 +77,13 @@ pub(crate) struct Fun {
|
|||||||
record: FunRecord,
|
record: FunRecord,
|
||||||
}
|
}
|
||||||
impl Fun {
|
impl Fun {
|
||||||
pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, f: F) -> Self {
|
pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, ctx: SysCtx, f: F) -> Self {
|
||||||
let ctx = ctx();
|
|
||||||
let funs: &FunsCtx = ctx.get_or_default();
|
let funs: &FunsCtx = ctx.get_or_default();
|
||||||
let mut fung = funs.0.lock().await;
|
let mut fung = funs.0.lock().await;
|
||||||
let record = if let Some(record) = fung.get(&path) {
|
let record = if let Some(record) = fung.get(&path) {
|
||||||
record.clone()
|
record.clone()
|
||||||
} else {
|
} else {
|
||||||
let record = process_args(f);
|
let record = process_args(path.to_string(), f).await;
|
||||||
fung.insert(path.clone(), record.clone());
|
fung.insert(path.clone(), record.clone());
|
||||||
record
|
record
|
||||||
};
|
};
|
||||||
@@ -99,19 +101,20 @@ impl OwnedAtom for Fun {
|
|||||||
async fn call_ref(&self, arg: Expr) -> GExpr {
|
async fn call_ref(&self, arg: Expr) -> GExpr {
|
||||||
let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
|
let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
|
||||||
if new_args.len() == self.record.argtyps.len() {
|
if new_args.len() == self.record.argtyps.len() {
|
||||||
(self.record.fun)(new_args).await.to_gen().await
|
(self.record.fun)(new_args).await.to_expr().await
|
||||||
} else {
|
} else {
|
||||||
Self { args: new_args, record: self.record.clone(), path: self.path.clone() }.to_gen().await
|
Self { args: new_args, record: self.record.clone(), path: self.path.clone() }.to_expr().await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
|
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
|
||||||
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
|
async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
|
||||||
self.path.to_api().encode(write).await;
|
self.path.to_api().encode(write).await;
|
||||||
self.args.clone()
|
self.args.clone()
|
||||||
}
|
}
|
||||||
async fn deserialize(mut ds_cx: impl DeserializeCtx, args: Self::Refs) -> Self {
|
async fn deserialize(mut ctx: impl DeserializeCtx, args: Self::Refs) -> Self {
|
||||||
let path = Sym::from_api(ds_cx.decode().await, &i()).await;
|
let sys = ctx.sys();
|
||||||
let record = (ctx().get::<FunsCtx>().0.lock().await.get(&path))
|
let path = Sym::from_api(ctx.decode().await, sys.i()).await;
|
||||||
|
let record = (sys.get::<FunsCtx>().0.lock().await.get(&path))
|
||||||
.expect("Function missing during deserialization")
|
.expect("Function missing during deserialization")
|
||||||
.clone();
|
.clone();
|
||||||
Self { args, path, record }
|
Self { args, path, record }
|
||||||
@@ -131,8 +134,11 @@ pub struct Lambda {
|
|||||||
record: FunRecord,
|
record: FunRecord,
|
||||||
}
|
}
|
||||||
impl Lambda {
|
impl Lambda {
|
||||||
pub fn new<I, O, F: ExprFunc<I, O>>(f: F) -> Self {
|
pub async fn new<I, O, F: ExprFunc<I, O>>(
|
||||||
Self { args: vec![], record: process_args(f) }
|
debug: impl AsRef<str> + Clone + 'static,
|
||||||
|
f: F,
|
||||||
|
) -> Self {
|
||||||
|
Self { args: vec![], record: process_args(debug, f).await }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Atomic for Lambda {
|
impl Atomic for Lambda {
|
||||||
@@ -145,9 +151,9 @@ impl OwnedAtom for Lambda {
|
|||||||
async fn call_ref(&self, arg: Expr) -> GExpr {
|
async fn call_ref(&self, arg: Expr) -> GExpr {
|
||||||
let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
|
let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
|
||||||
if new_args.len() == self.record.argtyps.len() {
|
if new_args.len() == self.record.argtyps.len() {
|
||||||
(self.record.fun)(new_args).await.to_gen().await
|
(self.record.fun)(new_args).await.to_expr().await
|
||||||
} else {
|
} else {
|
||||||
Self { args: new_args, record: self.record.clone() }.to_gen().await
|
Self { args: new_args, record: self.record.clone() }.to_expr().await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
|
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
|
||||||
@@ -170,7 +176,7 @@ mod expr_func_derives {
|
|||||||
impl<
|
impl<
|
||||||
$($t: TryFromExpr + 'static, )*
|
$($t: TryFromExpr + 'static, )*
|
||||||
Out: ToExpr,
|
Out: ToExpr,
|
||||||
Func: AsyncFn($($t,)*) -> Out + Clone + 'static
|
Func: AsyncFn($($t,)*) -> Out + Clone + Send + Sync + 'static
|
||||||
> ExprFunc<($($t,)*), Out> for Func {
|
> ExprFunc<($($t,)*), Out> for Func {
|
||||||
fn argtyps() -> &'static [TypeId] {
|
fn argtyps() -> &'static [TypeId] {
|
||||||
static STORE: OnceLock<Vec<TypeId>> = OnceLock::new();
|
static STORE: OnceLock<Vec<TypeId>> = OnceLock::new();
|
||||||
@@ -179,7 +185,7 @@ mod expr_func_derives {
|
|||||||
async fn apply<'a>(&self, _: ExecHandle<'a>, v: Vec<Expr>) -> OrcRes<GExpr> {
|
async fn apply<'a>(&self, _: ExecHandle<'a>, v: Vec<Expr>) -> OrcRes<GExpr> {
|
||||||
assert_eq!(v.len(), Self::argtyps().len(), "Arity mismatch");
|
assert_eq!(v.len(), Self::argtyps().len(), "Arity mismatch");
|
||||||
let [$([< $t:lower >],)*] = v.try_into().unwrap_or_else(|_| panic!("Checked above"));
|
let [$([< $t:lower >],)*] = v.try_into().unwrap_or_else(|_| panic!("Checked above"));
|
||||||
Ok(self($($t::try_from_expr([< $t:lower >]).await?,)*).await.to_gen().await)
|
Ok(self($($t::try_from_expr([< $t:lower >]).await?,)*).await.to_expr().await)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
use std::mem;
|
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
@@ -6,13 +5,13 @@ use orchid_base::error::{OrcErr, OrcErrv};
|
|||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::reqnot::Requester;
|
use orchid_base::reqnot::ReqHandlish;
|
||||||
use orchid_base::{match_mapping, tl_cache};
|
use orchid_base::{match_mapping, tl_cache};
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::atom::{AtomFactory, ToAtom};
|
use crate::atom::{AtomFactory, ToAtom};
|
||||||
use crate::context::ctx;
|
|
||||||
use crate::expr::Expr;
|
use crate::expr::Expr;
|
||||||
|
use crate::system::SysCtx;
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct GExpr {
|
pub struct GExpr {
|
||||||
@@ -20,32 +19,25 @@ pub struct GExpr {
|
|||||||
pub pos: Pos,
|
pub pos: Pos,
|
||||||
}
|
}
|
||||||
impl GExpr {
|
impl GExpr {
|
||||||
/// Release notifications will not be sent for the slots. Use this with
|
pub async fn api_return(self, ctx: SysCtx, hand: &impl ReqHandlish) -> api::Expression {
|
||||||
/// messages that imply ownership transfer
|
|
||||||
pub async fn serialize(self) -> api::Expression {
|
|
||||||
if let GExprKind::Slot(ex) = self.kind {
|
if let GExprKind::Slot(ex) = self.kind {
|
||||||
let hand = ex.handle();
|
hand.defer_drop(ex.handle());
|
||||||
mem::drop(ex);
|
|
||||||
api::Expression {
|
api::Expression {
|
||||||
location: api::Location::SlotTarget,
|
location: api::Location::SlotTarget,
|
||||||
// an instance is leaked here, we must take ownership of it when we receive this
|
kind: api::ExpressionKind::Slot(ex.handle().tk),
|
||||||
kind: api::ExpressionKind::Slot(hand.serialize().await),
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
api::Expression {
|
api::Expression {
|
||||||
location: api::Location::Inherit,
|
location: api::Location::Inherit,
|
||||||
kind: self.kind.serialize().boxed_local().await,
|
kind: self.kind.api_return(ctx, hand).boxed_local().await,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn at(self, pos: Pos) -> Self { GExpr { pos, kind: self.kind } }
|
pub fn at(self, pos: Pos) -> Self { GExpr { pos, kind: self.kind } }
|
||||||
pub async fn create(self) -> Expr {
|
|
||||||
Expr::deserialize(ctx().reqnot().request(api::Create(self.serialize().await)).await).await
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
impl Format for GExpr {
|
impl Format for GExpr {
|
||||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
self.kind.print(c).boxed_local().await
|
self.kind.print(c).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -61,21 +53,21 @@ pub enum GExprKind {
|
|||||||
Bottom(OrcErrv),
|
Bottom(OrcErrv),
|
||||||
}
|
}
|
||||||
impl GExprKind {
|
impl GExprKind {
|
||||||
pub async fn serialize(self) -> api::ExpressionKind {
|
pub async fn api_return(self, ctx: SysCtx, hand: &impl ReqHandlish) -> api::ExpressionKind {
|
||||||
match_mapping!(self, Self => api::ExpressionKind {
|
match_mapping!(self, Self => api::ExpressionKind {
|
||||||
Call(
|
Call(
|
||||||
f => Box::new(f.serialize().await),
|
f => Box::new(f.api_return(ctx.clone(), hand).await),
|
||||||
x => Box::new(x.serialize().await)
|
x => Box::new(x.api_return(ctx, hand).await)
|
||||||
),
|
),
|
||||||
Seq(
|
Seq(
|
||||||
a => Box::new(a.serialize().await),
|
a => Box::new(a.api_return(ctx.clone(), hand).await),
|
||||||
b => Box::new(b.serialize().await)
|
b => Box::new(b.api_return(ctx, hand).await)
|
||||||
),
|
),
|
||||||
Lambda(arg, body => Box::new(body.serialize().await)),
|
Lambda(arg, body => Box::new(body.api_return(ctx, hand).await)),
|
||||||
Arg(arg),
|
Arg(arg),
|
||||||
Const(name.to_api()),
|
Const(name.to_api()),
|
||||||
Bottom(err.to_api()),
|
Bottom(err.to_api()),
|
||||||
NewAtom(fac.clone().build().await),
|
NewAtom(fac.clone().build(ctx).await),
|
||||||
} {
|
} {
|
||||||
Self::Slot(_) => panic!("processed elsewhere")
|
Self::Slot(_) => panic!("processed elsewhere")
|
||||||
})
|
})
|
||||||
@@ -109,7 +101,7 @@ fn inherit(kind: GExprKind) -> GExpr { GExpr { pos: Pos::Inherit, kind } }
|
|||||||
pub fn sym_ref(path: Sym) -> GExpr { inherit(GExprKind::Const(path)) }
|
pub fn sym_ref(path: Sym) -> GExpr { inherit(GExprKind::Const(path)) }
|
||||||
pub fn atom<A: ToAtom>(atom: A) -> GExpr { inherit(GExprKind::NewAtom(atom.to_atom_factory())) }
|
pub fn atom<A: ToAtom>(atom: A) -> GExpr { inherit(GExprKind::NewAtom(atom.to_atom_factory())) }
|
||||||
|
|
||||||
pub fn seq(deps: impl IntoIterator<Item = GExpr>, val: GExpr) -> GExpr {
|
pub fn seq(ops: impl IntoIterator<Item = GExpr>) -> GExpr {
|
||||||
fn recur(mut ops: impl Iterator<Item = GExpr>) -> Option<GExpr> {
|
fn recur(mut ops: impl Iterator<Item = GExpr>) -> Option<GExpr> {
|
||||||
let op = ops.next()?;
|
let op = ops.next()?;
|
||||||
Some(match recur(ops) {
|
Some(match recur(ops) {
|
||||||
@@ -117,15 +109,19 @@ pub fn seq(deps: impl IntoIterator<Item = GExpr>, val: GExpr) -> GExpr {
|
|||||||
Some(rec) => inherit(GExprKind::Seq(Box::new(op), Box::new(rec))),
|
Some(rec) => inherit(GExprKind::Seq(Box::new(op), Box::new(rec))),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
recur(deps.into_iter().chain([val])).expect("Empty list provided to seq!")
|
recur(ops.into_iter()).expect("Empty list provided to seq!")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn arg(n: u64) -> GExpr { inherit(GExprKind::Arg(n)) }
|
pub fn arg(n: u64) -> GExpr { inherit(GExprKind::Arg(n)) }
|
||||||
|
|
||||||
pub fn lambda(n: u64, [b]: [GExpr; 1]) -> GExpr { inherit(GExprKind::Lambda(n, Box::new(b))) }
|
pub fn lambda(n: u64, b: impl IntoIterator<Item = GExpr>) -> GExpr {
|
||||||
|
inherit(GExprKind::Lambda(n, Box::new(call(b))))
|
||||||
|
}
|
||||||
|
|
||||||
pub fn call(f: GExpr, argv: impl IntoIterator<Item = GExpr>) -> GExpr {
|
pub fn call(v: impl IntoIterator<Item = GExpr>) -> GExpr {
|
||||||
(argv.into_iter()).fold(f, |f, x| inherit(GExprKind::Call(Box::new(f), Box::new(x))))
|
v.into_iter()
|
||||||
|
.reduce(|f, x| inherit(GExprKind::Call(Box::new(f), Box::new(x))))
|
||||||
|
.expect("Empty call expression")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> GExpr {
|
pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> GExpr {
|
||||||
|
|||||||
7
orchid-extension/src/interner.rs
Normal file
7
orchid-extension/src/interner.rs
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
use orchid_base::interner::{ApiStrTok, ApiStrvTok, IStr};
|
||||||
|
|
||||||
|
pub struct ExtIStr(ApiStrTok, Rc<String>);
|
||||||
|
impl Deref for ExtIStr {}
|
||||||
|
pub struct ExtIStrv(ApiStrvTok, Rc<Vec<IStr>>);
|
||||||
|
|
||||||
|
pub struct ExtInterner {}
|
||||||
@@ -4,7 +4,7 @@ use std::ops::RangeInclusive;
|
|||||||
|
|
||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use futures::future::LocalBoxFuture;
|
use futures::future::LocalBoxFuture;
|
||||||
use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_errv};
|
use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_err};
|
||||||
use orchid_base::interner::{Interner, Tok};
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::location::{Pos, SrcRange};
|
use orchid_base::location::{Pos, SrcRange};
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
@@ -12,48 +12,38 @@ use orchid_base::parse::ParseCtx;
|
|||||||
use orchid_base::reqnot::Requester;
|
use orchid_base::reqnot::Requester;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::context::{ctx, i};
|
|
||||||
use crate::expr::BorrowedExprStore;
|
use crate::expr::BorrowedExprStore;
|
||||||
use crate::parser::PTokTree;
|
use crate::parser::PTokTree;
|
||||||
|
use crate::system::SysCtx;
|
||||||
use crate::tree::GenTokTree;
|
use crate::tree::GenTokTree;
|
||||||
|
|
||||||
pub async fn ekey_cascade() -> Tok<String> {
|
pub async fn ekey_cascade(i: &Interner) -> IStr {
|
||||||
i().i("An error cascading from a recursive call").await
|
i.i("An error cascading from a recursive call").await
|
||||||
}
|
}
|
||||||
pub async fn ekey_not_applicable() -> Tok<String> {
|
pub async fn ekey_not_applicable(i: &Interner) -> IStr {
|
||||||
i().i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await
|
i.i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await
|
||||||
}
|
}
|
||||||
const MSG_INTERNAL_ERROR: &str = "This error is a sentinel for the extension library.\
|
const MSG_INTERNAL_ERROR: &str = "This error is a sentinel for the extension library.\
|
||||||
it should not be emitted by the extension.";
|
it should not be emitted by the extension.";
|
||||||
|
|
||||||
pub async fn err_cascade() -> OrcErrv {
|
pub async fn err_cascade(i: &Interner) -> OrcErrv {
|
||||||
mk_errv(ekey_cascade().await, MSG_INTERNAL_ERROR, [Pos::None])
|
mk_err(ekey_cascade(i).await, MSG_INTERNAL_ERROR, [Pos::None])
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn err_not_applicable() -> OrcErrv {
|
pub async fn err_not_applicable(i: &Interner) -> OrcErrv {
|
||||||
mk_errv(ekey_not_applicable().await, MSG_INTERNAL_ERROR, [Pos::None])
|
mk_err(ekey_not_applicable(i).await, MSG_INTERNAL_ERROR, [Pos::None])
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct LexContext<'a> {
|
pub struct LexContext<'a> {
|
||||||
pub(crate) exprs: &'a BorrowedExprStore,
|
pub(crate) exprs: &'a BorrowedExprStore,
|
||||||
pub text: &'a Tok<String>,
|
pub ctx: SysCtx,
|
||||||
|
pub text: &'a IStr,
|
||||||
pub id: api::ParsId,
|
pub id: api::ParsId,
|
||||||
pub pos: u32,
|
pub pos: u32,
|
||||||
i: Interner,
|
|
||||||
pub(crate) src: Sym,
|
pub(crate) src: Sym,
|
||||||
pub(crate) rep: &'a Reporter,
|
pub(crate) rep: &'a Reporter,
|
||||||
}
|
}
|
||||||
impl<'a> LexContext<'a> {
|
impl<'a> LexContext<'a> {
|
||||||
pub fn new(
|
|
||||||
exprs: &'a BorrowedExprStore,
|
|
||||||
text: &'a Tok<String>,
|
|
||||||
id: api::ParsId,
|
|
||||||
pos: u32,
|
|
||||||
src: Sym,
|
|
||||||
rep: &'a Reporter,
|
|
||||||
) -> Self {
|
|
||||||
Self { exprs, i: i(), id, pos, rep, src, text }
|
|
||||||
}
|
|
||||||
pub fn src(&self) -> &Sym { &self.src }
|
pub fn src(&self) -> &Sym { &self.src }
|
||||||
/// This function returns [PTokTree] because it can never return
|
/// This function returns [PTokTree] because it can never return
|
||||||
/// [orchid_base::tree::Token::NewExpr]. You can use
|
/// [orchid_base::tree::Token::NewExpr]. You can use
|
||||||
@@ -61,10 +51,17 @@ impl<'a> LexContext<'a> {
|
|||||||
/// for embedding in the return value.
|
/// for embedding in the return value.
|
||||||
pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, PTokTree)> {
|
pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, PTokTree)> {
|
||||||
let start = self.pos(tail);
|
let start = self.pos(tail);
|
||||||
let Some(lx) = ctx().reqnot().request(api::SubLex { pos: start, id: self.id }).await else {
|
let Some(lx) = self.ctx.reqnot().request(api::SubLex { pos: start, id: self.id }).await else {
|
||||||
return Err(err_cascade().await);
|
return Err(err_cascade(self.ctx.i()).await);
|
||||||
};
|
};
|
||||||
let tree = PTokTree::from_api(&lx.tree, &mut { self.exprs }, &mut (), &self.src, &i()).await;
|
let tree = PTokTree::from_api(
|
||||||
|
&lx.tree,
|
||||||
|
&mut (self.ctx.clone(), self.exprs),
|
||||||
|
&mut (),
|
||||||
|
&self.src,
|
||||||
|
self.ctx.i(),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
Ok((&self.text[lx.pos as usize..], tree))
|
Ok((&self.text[lx.pos as usize..], tree))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -78,7 +75,7 @@ impl<'a> LexContext<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl ParseCtx for LexContext<'_> {
|
impl ParseCtx for LexContext<'_> {
|
||||||
fn i(&self) -> &Interner { &self.i }
|
fn i(&self) -> &Interner { self.ctx.i() }
|
||||||
fn rep(&self) -> &Reporter { self.rep }
|
fn rep(&self) -> &Reporter { self.rep }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -86,7 +83,7 @@ pub trait Lexer: Send + Sync + Sized + Default + 'static {
|
|||||||
const CHAR_FILTER: &'static [RangeInclusive<char>];
|
const CHAR_FILTER: &'static [RangeInclusive<char>];
|
||||||
fn lex<'a>(
|
fn lex<'a>(
|
||||||
tail: &'a str,
|
tail: &'a str,
|
||||||
lctx: &'a LexContext<'a>,
|
ctx: &'a LexContext<'a>,
|
||||||
) -> impl Future<Output = OrcRes<(&'a str, GenTokTree)>>;
|
) -> impl Future<Output = OrcRes<(&'a str, GenTokTree)>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ pub mod func_atom;
|
|||||||
pub mod gen_expr;
|
pub mod gen_expr;
|
||||||
pub mod lexer;
|
pub mod lexer;
|
||||||
// pub mod msg;
|
// pub mod msg;
|
||||||
pub mod context;
|
mod interner;
|
||||||
pub mod other_system;
|
pub mod other_system;
|
||||||
pub mod parser;
|
pub mod parser;
|
||||||
pub mod reflection;
|
pub mod reflection;
|
||||||
|
|||||||
@@ -1,12 +1,15 @@
|
|||||||
|
use std::marker::PhantomData;
|
||||||
|
use std::mem::size_of;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::system::{DynSystemCard, SystemCard};
|
use crate::system::{DynSystemCard, SystemCard};
|
||||||
|
|
||||||
pub struct SystemHandle<C: SystemCard> {
|
pub struct SystemHandle<C: SystemCard> {
|
||||||
pub(crate) card: C,
|
pub(crate) _card: PhantomData<C>,
|
||||||
pub(crate) id: api::SysId,
|
pub(crate) id: api::SysId,
|
||||||
}
|
}
|
||||||
impl<C: SystemCard> SystemHandle<C> {
|
impl<C: SystemCard> SystemHandle<C> {
|
||||||
pub(crate) fn new(id: api::SysId) -> Self { Self { card: C::default(), id } }
|
pub(crate) fn new(id: api::SysId) -> Self { Self { _card: PhantomData, id } }
|
||||||
pub fn id(&self) -> api::SysId { self.id }
|
pub fn id(&self) -> api::SysId { self.id }
|
||||||
}
|
}
|
||||||
impl<C: SystemCard> Clone for SystemHandle<C> {
|
impl<C: SystemCard> Clone for SystemHandle<C> {
|
||||||
@@ -18,7 +21,16 @@ pub trait DynSystemHandle {
|
|||||||
fn get_card(&self) -> &dyn DynSystemCard;
|
fn get_card(&self) -> &dyn DynSystemCard;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn leak_card<T: Default>() -> &'static T {
|
||||||
|
const {
|
||||||
|
if 0 != size_of::<T>() {
|
||||||
|
panic!("Attempted to leak positively sized Card. Card types must always be zero-sized");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Box::leak(Box::default())
|
||||||
|
}
|
||||||
|
|
||||||
impl<C: SystemCard> DynSystemHandle for SystemHandle<C> {
|
impl<C: SystemCard> DynSystemHandle for SystemHandle<C> {
|
||||||
fn id(&self) -> api::SysId { self.id }
|
fn id(&self) -> api::SysId { self.id }
|
||||||
fn get_card(&self) -> &dyn DynSystemCard { &self.card }
|
fn get_card(&self) -> &'static dyn DynSystemCard { leak_card::<C>() }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,14 +12,14 @@ use orchid_base::location::SrcRange;
|
|||||||
use orchid_base::match_mapping;
|
use orchid_base::match_mapping;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::parse::{Comment, ParseCtx, Snippet};
|
use orchid_base::parse::{Comment, ParseCtx, Snippet};
|
||||||
use orchid_base::reqnot::Requester;
|
use orchid_base::reqnot::{ReqHandlish, Requester};
|
||||||
use orchid_base::tree::{TokTree, Token, ttv_into_api};
|
use orchid_base::tree::{TokTree, Token, ttv_into_api};
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::context::{SysCtxEntry, ctx, i};
|
|
||||||
use crate::conv::ToExpr;
|
use crate::conv::ToExpr;
|
||||||
use crate::expr::Expr;
|
use crate::expr::Expr;
|
||||||
use crate::gen_expr::GExpr;
|
use crate::gen_expr::GExpr;
|
||||||
|
use crate::system::{SysCtx, SysCtxEntry};
|
||||||
use crate::tree::{GenTok, GenTokTree};
|
use crate::tree::{GenTok, GenTokTree};
|
||||||
|
|
||||||
pub type PTok = Token<Expr, Never>;
|
pub type PTok = Token<Expr, Never>;
|
||||||
@@ -81,26 +81,27 @@ pub type ParserObj = &'static dyn DynParser;
|
|||||||
|
|
||||||
pub struct ParsCtx<'a> {
|
pub struct ParsCtx<'a> {
|
||||||
_parse: PhantomData<&'a mut ()>,
|
_parse: PhantomData<&'a mut ()>,
|
||||||
|
ctx: SysCtx,
|
||||||
module: Sym,
|
module: Sym,
|
||||||
reporter: &'a Reporter,
|
reporter: &'a Reporter,
|
||||||
i: Interner,
|
|
||||||
}
|
}
|
||||||
impl<'a> ParsCtx<'a> {
|
impl<'a> ParsCtx<'a> {
|
||||||
pub(crate) fn new(module: Sym, reporter: &'a Reporter) -> Self {
|
pub(crate) fn new(ctx: SysCtx, module: Sym, reporter: &'a Reporter) -> Self {
|
||||||
Self { _parse: PhantomData, module, reporter, i: i() }
|
Self { _parse: PhantomData, ctx, module, reporter }
|
||||||
}
|
}
|
||||||
|
pub fn ctx(&self) -> &SysCtx { &self.ctx }
|
||||||
pub fn module(&self) -> Sym { self.module.clone() }
|
pub fn module(&self) -> Sym { self.module.clone() }
|
||||||
}
|
}
|
||||||
impl ParseCtx for ParsCtx<'_> {
|
impl ParseCtx for ParsCtx<'_> {
|
||||||
fn i(&self) -> &Interner { &self.i }
|
fn i(&self) -> &Interner { self.ctx.i() }
|
||||||
fn rep(&self) -> &Reporter { self.reporter }
|
fn rep(&self) -> &Reporter { self.reporter }
|
||||||
}
|
}
|
||||||
|
|
||||||
type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>;
|
type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>;
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub(crate) struct ParsedConstCtxEntry {
|
struct ParsedConstCtxEntry {
|
||||||
pub(crate) consts: IdStore<BoxConstCallback>,
|
consts: IdStore<BoxConstCallback>,
|
||||||
}
|
}
|
||||||
impl SysCtxEntry for ParsedConstCtxEntry {}
|
impl SysCtxEntry for ParsedConstCtxEntry {}
|
||||||
|
|
||||||
@@ -114,10 +115,10 @@ impl ParsedLine {
|
|||||||
sr: &SrcRange,
|
sr: &SrcRange,
|
||||||
comments: impl IntoIterator<Item = &'a Comment>,
|
comments: impl IntoIterator<Item = &'a Comment>,
|
||||||
exported: bool,
|
exported: bool,
|
||||||
name: Tok<String>,
|
name: IStr,
|
||||||
f: F,
|
f: F,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let cb = Box::new(|ctx| async move { f(ctx).await.to_gen().await }.boxed_local());
|
let cb = Box::new(|ctx| async move { f(ctx).await.to_expr().await }.boxed_local());
|
||||||
let kind = ParsedLineKind::Mem(ParsedMem { name, exported, kind: ParsedMemKind::Const(cb) });
|
let kind = ParsedLineKind::Mem(ParsedMem { name, exported, kind: ParsedMemKind::Const(cb) });
|
||||||
let comments = comments.into_iter().cloned().collect();
|
let comments = comments.into_iter().cloned().collect();
|
||||||
ParsedLine { comments, sr: sr.clone(), kind }
|
ParsedLine { comments, sr: sr.clone(), kind }
|
||||||
@@ -126,7 +127,7 @@ impl ParsedLine {
|
|||||||
sr: &SrcRange,
|
sr: &SrcRange,
|
||||||
comments: impl IntoIterator<Item = &'a Comment>,
|
comments: impl IntoIterator<Item = &'a Comment>,
|
||||||
exported: bool,
|
exported: bool,
|
||||||
name: &Tok<String>,
|
name: &IStr,
|
||||||
use_prelude: bool,
|
use_prelude: bool,
|
||||||
lines: impl IntoIterator<Item = ParsedLine>,
|
lines: impl IntoIterator<Item = ParsedLine>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
@@ -135,7 +136,7 @@ impl ParsedLine {
|
|||||||
let comments = comments.into_iter().cloned().collect();
|
let comments = comments.into_iter().cloned().collect();
|
||||||
ParsedLine { comments, sr: sr.clone(), kind: line_kind }
|
ParsedLine { comments, sr: sr.clone(), kind: line_kind }
|
||||||
}
|
}
|
||||||
pub async fn into_api(self) -> api::ParsedLine {
|
pub async fn into_api(self, ctx: SysCtx, hand: &dyn ReqHandlish) -> api::ParsedLine {
|
||||||
api::ParsedLine {
|
api::ParsedLine {
|
||||||
comments: self.comments.into_iter().map(|c| c.to_api()).collect(),
|
comments: self.comments.into_iter().map(|c| c.to_api()).collect(),
|
||||||
source_range: self.sr.to_api(),
|
source_range: self.sr.to_api(),
|
||||||
@@ -145,23 +146,27 @@ impl ParsedLine {
|
|||||||
exported: mem.exported,
|
exported: mem.exported,
|
||||||
kind: match mem.kind {
|
kind: match mem.kind {
|
||||||
ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
|
ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
|
||||||
ctx().get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
|
ctx.get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
|
||||||
)),
|
)),
|
||||||
ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module {
|
ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module {
|
||||||
lines: linev_into_api(lines).boxed_local().await,
|
lines: linev_into_api(lines, ctx, hand).boxed_local().await,
|
||||||
use_prelude,
|
use_prelude,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
ParsedLineKind::Rec(tv) =>
|
ParsedLineKind::Rec(tv) =>
|
||||||
api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut ()).await),
|
api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut (ctx, hand)).await),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) async fn linev_into_api(v: Vec<ParsedLine>) -> Vec<api::ParsedLine> {
|
pub(crate) async fn linev_into_api(
|
||||||
join_all(v.into_iter().map(|l| l.into_api())).await
|
v: Vec<ParsedLine>,
|
||||||
|
ctx: SysCtx,
|
||||||
|
hand: &dyn ReqHandlish,
|
||||||
|
) -> Vec<api::ParsedLine> {
|
||||||
|
join_all(v.into_iter().map(|l| l.into_api(ctx.clone(), hand))).await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum ParsedLineKind {
|
pub enum ParsedLineKind {
|
||||||
@@ -170,7 +175,7 @@ pub enum ParsedLineKind {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub struct ParsedMem {
|
pub struct ParsedMem {
|
||||||
pub name: Tok<String>,
|
pub name: IStr,
|
||||||
pub exported: bool,
|
pub exported: bool,
|
||||||
pub kind: ParsedMemKind,
|
pub kind: ParsedMemKind,
|
||||||
}
|
}
|
||||||
@@ -182,23 +187,26 @@ pub enum ParsedMemKind {
|
|||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct ConstCtx {
|
pub struct ConstCtx {
|
||||||
|
ctx: SysCtx,
|
||||||
constid: api::ParsedConstId,
|
constid: api::ParsedConstId,
|
||||||
}
|
}
|
||||||
impl ConstCtx {
|
impl ConstCtx {
|
||||||
|
pub fn ctx(&self) -> &SysCtx { &self.ctx }
|
||||||
|
pub fn i(&self) -> &Interner { self.ctx.i() }
|
||||||
pub fn names<'b>(
|
pub fn names<'b>(
|
||||||
&'b self,
|
&'b self,
|
||||||
names: impl IntoIterator<Item = &'b Sym> + 'b,
|
names: impl IntoIterator<Item = &'b Sym> + 'b,
|
||||||
) -> impl Stream<Item = OrcRes<Sym>> + 'b {
|
) -> impl Stream<Item = OrcRes<Sym>> + 'b {
|
||||||
let resolve_names = api::ResolveNames {
|
let resolve_names = api::ResolveNames {
|
||||||
constid: self.constid,
|
constid: self.constid,
|
||||||
sys: ctx().sys_id(),
|
sys: self.ctx.sys_id(),
|
||||||
names: names.into_iter().map(|n| n.to_api()).collect_vec(),
|
names: names.into_iter().map(|n| n.to_api()).collect_vec(),
|
||||||
};
|
};
|
||||||
stream(async |mut cx| {
|
stream(async |mut cx| {
|
||||||
for name_opt in ctx().reqnot().request(resolve_names).await {
|
for name_opt in self.ctx.reqnot().request(resolve_names).await {
|
||||||
cx.emit(match name_opt {
|
cx.emit(match name_opt {
|
||||||
Err(e) => Err(OrcErrv::from_api(&e, &i()).await),
|
Err(e) => Err(OrcErrv::from_api(&e, self.ctx.i()).await),
|
||||||
Ok(name) => Ok(Sym::from_api(name, &i()).await),
|
Ok(name) => Ok(Sym::from_api(name, self.ctx.i()).await),
|
||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
@@ -209,9 +217,9 @@ impl ConstCtx {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) async fn get_const(id: api::ParsedConstId) -> GExpr {
|
pub(crate) async fn get_const(id: api::ParsedConstId, ctx: SysCtx) -> GExpr {
|
||||||
let cb = (ctx().get_or_default::<ParsedConstCtxEntry>().consts.get(id.0))
|
let ent = ctx.get::<ParsedConstCtxEntry>();
|
||||||
.expect("Bad ID or double read of parsed const")
|
let rec = ent.consts.get(id.0).expect("Bad ID or double read of parsed const");
|
||||||
.remove();
|
let ctx = ConstCtx { constid: id, ctx: ctx.clone() };
|
||||||
cb(ConstCtx { constid: id }).await
|
rec.remove()(ctx).await
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,57 +9,59 @@ use orchid_base::name::{NameLike, VPath};
|
|||||||
use orchid_base::reqnot::Requester;
|
use orchid_base::reqnot::Requester;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::context::{SysCtxEntry, ctx, i};
|
use crate::system::{SysCtx, SysCtxEntry, WeakSysCtx};
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct ReflMemData {
|
pub struct ReflMemData {
|
||||||
// None for inferred steps
|
// None for inferred steps
|
||||||
public: OnceCell<bool>,
|
public: OnceCell<bool>,
|
||||||
kind: ReflMemKind,
|
kind: ReflMemKind,
|
||||||
}
|
}
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone)]
|
||||||
pub struct ReflMem(Rc<ReflMemData>);
|
pub struct ReflMem(Rc<ReflMemData>);
|
||||||
impl ReflMem {
|
impl ReflMem {
|
||||||
pub fn kind(&self) -> ReflMemKind { self.0.kind.clone() }
|
pub fn kind(&self) -> ReflMemKind { self.0.kind.clone() }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone)]
|
||||||
pub enum ReflMemKind {
|
pub enum ReflMemKind {
|
||||||
Const,
|
Const,
|
||||||
Mod(ReflMod),
|
Mod(ReflMod),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct ReflModData {
|
pub struct ReflModData {
|
||||||
inferred: Mutex<bool>,
|
inferred: Mutex<bool>,
|
||||||
path: VPath,
|
path: VPath,
|
||||||
members: MemoMap<Tok<String>, ReflMem>,
|
ctx: WeakSysCtx,
|
||||||
|
members: MemoMap<IStr, ReflMem>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone)]
|
||||||
pub struct ReflMod(Rc<ReflModData>);
|
pub struct ReflMod(Rc<ReflModData>);
|
||||||
impl ReflMod {
|
impl ReflMod {
|
||||||
pub fn path(&self) -> &[Tok<String>] { &self.0.path[..] }
|
fn ctx(&self) -> SysCtx {
|
||||||
|
self.0.ctx.upgrade().expect("ReflectedModule accessed after context drop")
|
||||||
|
}
|
||||||
|
pub fn path(&self) -> &[IStr] { &self.0.path[..] }
|
||||||
pub fn is_root(&self) -> bool { self.0.path.is_empty() }
|
pub fn is_root(&self) -> bool { self.0.path.is_empty() }
|
||||||
async fn try_populate(&self) -> Result<(), api::LsModuleError> {
|
async fn try_populate(&self) -> Result<(), api::LsModuleError> {
|
||||||
let path_tok = i().i(&self.0.path[..]).await;
|
let ctx = self.ctx();
|
||||||
let reply = match ctx().reqnot().request(api::LsModule(ctx().sys_id(), path_tok.to_api())).await
|
let path_tok = ctx.i().i(&self.0.path[..]).await;
|
||||||
{
|
let reply = match ctx.reqnot().request(api::LsModule(ctx.sys_id(), path_tok.to_api())).await {
|
||||||
Err(api::LsModuleError::TreeUnavailable) =>
|
Err(api::LsModuleError::TreeUnavailable) =>
|
||||||
panic!("Reflected tree accessed outside an interpreter call. This extension is faulty."),
|
panic!("Reflected tree accessed outside an interpreter call. This extension is faulty."),
|
||||||
Err(err) => return Err(err),
|
Err(err) => return Err(err),
|
||||||
Ok(details) => details,
|
Ok(details) => details,
|
||||||
};
|
};
|
||||||
for (k, v) in reply.members {
|
for (k, v) in reply.members {
|
||||||
let k = i().ex(k).await;
|
let k = ctx.i().ex(k).await;
|
||||||
let mem = match self.0.members.get(&k) {
|
let mem = match self.0.members.get(&k) {
|
||||||
Some(mem) => mem,
|
Some(mem) => mem,
|
||||||
None => {
|
None => {
|
||||||
let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym(&i()).await;
|
let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym(ctx.i()).await;
|
||||||
let kind = match v.kind {
|
let kind = match v.kind {
|
||||||
api::MemberInfoKind::Constant => ReflMemKind::Const,
|
api::MemberInfoKind::Constant => ReflMemKind::Const,
|
||||||
api::MemberInfoKind::Module =>
|
api::MemberInfoKind::Module =>
|
||||||
ReflMemKind::Mod(default_module(VPath::new(path.segs()))),
|
ReflMemKind::Mod(default_module(&ctx, VPath::new(path.segs()))),
|
||||||
};
|
};
|
||||||
self.0.members.get_or_insert(&k, || default_member(self.is_root(), kind))
|
self.0.members.get_or_insert(&k, || default_member(self.is_root(), kind))
|
||||||
},
|
},
|
||||||
@@ -68,7 +70,7 @@ impl ReflMod {
|
|||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
pub async fn get_child(&self, key: &Tok<String>) -> Option<ReflMem> {
|
pub async fn get_child(&self, key: &IStr) -> Option<ReflMem> {
|
||||||
let inferred_g = self.0.inferred.lock().await;
|
let inferred_g = self.0.inferred.lock().await;
|
||||||
if let Some(mem) = self.0.members.get(key) {
|
if let Some(mem) = self.0.members.get(key) {
|
||||||
return Some(mem.clone());
|
return Some(mem.clone());
|
||||||
@@ -86,7 +88,8 @@ impl ReflMod {
|
|||||||
}
|
}
|
||||||
self.0.members.get(key).cloned()
|
self.0.members.get(key).cloned()
|
||||||
}
|
}
|
||||||
pub async fn get_by_path(&self, path: &[Tok<String>]) -> Result<ReflMem, InvalidPathError> {
|
pub async fn get_by_path(&self, path: &[IStr]) -> Result<ReflMem, InvalidPathError> {
|
||||||
|
let ctx = self.ctx();
|
||||||
let (next, tail) = path.split_first().expect("Attempted to walk by empty path");
|
let (next, tail) = path.split_first().expect("Attempted to walk by empty path");
|
||||||
let inferred_g = self.0.inferred.lock().await;
|
let inferred_g = self.0.inferred.lock().await;
|
||||||
if let Some(next) = self.0.members.get(next) {
|
if let Some(next) = self.0.members.get(next) {
|
||||||
@@ -102,7 +105,7 @@ impl ReflMod {
|
|||||||
if !*inferred_g {
|
if !*inferred_g {
|
||||||
return Err(InvalidPathError { keep_ancestry: true });
|
return Err(InvalidPathError { keep_ancestry: true });
|
||||||
}
|
}
|
||||||
let candidate = default_module(self.0.path.clone().suffix([next.clone()]));
|
let candidate = default_module(&ctx, self.0.path.clone().suffix([next.clone()]));
|
||||||
if tail.is_empty() {
|
if tail.is_empty() {
|
||||||
return match candidate.try_populate().await {
|
return match candidate.try_populate().await {
|
||||||
Ok(()) => {
|
Ok(()) => {
|
||||||
@@ -113,8 +116,8 @@ impl ReflMod {
|
|||||||
Err(api::LsModuleError::InvalidPath) => Err(InvalidPathError { keep_ancestry: false }),
|
Err(api::LsModuleError::InvalidPath) => Err(InvalidPathError { keep_ancestry: false }),
|
||||||
Err(api::LsModuleError::IsConstant) => {
|
Err(api::LsModuleError::IsConstant) => {
|
||||||
let const_mem = default_member(self.is_root(), ReflMemKind::Const);
|
let const_mem = default_member(self.is_root(), ReflMemKind::Const);
|
||||||
self.0.members.insert(next.clone(), const_mem.clone());
|
self.0.members.insert(next.clone(), const_mem);
|
||||||
Ok(const_mem)
|
Err(InvalidPathError { keep_ancestry: true })
|
||||||
},
|
},
|
||||||
Err(api::LsModuleError::TreeUnavailable) => unreachable!(),
|
Err(api::LsModuleError::TreeUnavailable) => unreachable!(),
|
||||||
};
|
};
|
||||||
@@ -130,17 +133,20 @@ impl ReflMod {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
struct ReflRoot(ReflMod);
|
struct ReflRoot(ReflMod);
|
||||||
impl SysCtxEntry for ReflRoot {}
|
impl SysCtxEntry for ReflRoot {}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct InvalidPathError {
|
pub struct InvalidPathError {
|
||||||
keep_ancestry: bool,
|
keep_ancestry: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_module(path: VPath) -> ReflMod {
|
fn default_module(ctx: &SysCtx, path: VPath) -> ReflMod {
|
||||||
ReflMod(Rc::new(ReflModData { inferred: Mutex::new(true), path, members: MemoMap::new() }))
|
ReflMod(Rc::new(ReflModData {
|
||||||
|
ctx: ctx.downgrade(),
|
||||||
|
inferred: Mutex::new(true),
|
||||||
|
path,
|
||||||
|
members: MemoMap::new(),
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
|
fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
|
||||||
@@ -150,8 +156,8 @@ fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_root() -> ReflRoot {
|
fn get_root(ctx: &SysCtx) -> &ReflRoot {
|
||||||
ctx().get_or_insert(|| ReflRoot(default_module(VPath::new([])))).clone()
|
ctx.get_or_insert(|| ReflRoot(default_module(ctx, VPath::new([]))))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn refl() -> ReflMod { get_root().0.clone() }
|
pub fn refl(ctx: &SysCtx) -> ReflMod { get_root(ctx).0.clone() }
|
||||||
|
|||||||
@@ -1,18 +1,22 @@
|
|||||||
use std::any::{Any, TypeId};
|
use std::any::{Any, TypeId, type_name};
|
||||||
|
use std::fmt;
|
||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use std::num::NonZero;
|
use std::num::NonZero;
|
||||||
use std::pin::Pin;
|
use std::pin::Pin;
|
||||||
|
use std::rc::{Rc, Weak};
|
||||||
|
|
||||||
use futures::FutureExt;
|
|
||||||
use futures::future::LocalBoxFuture;
|
use futures::future::LocalBoxFuture;
|
||||||
use orchid_api_traits::{Coding, Decode, Encode, Request};
|
use memo_map::MemoMap;
|
||||||
|
use orchid_api_traits::{Coding, Decode};
|
||||||
use orchid_base::boxed_iter::BoxedIter;
|
use orchid_base::boxed_iter::BoxedIter;
|
||||||
|
use orchid_base::builtin::Spawner;
|
||||||
|
use orchid_base::interner::Interner;
|
||||||
|
use orchid_base::logging::Logger;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::reqnot::{Receipt, Requester};
|
use orchid_base::reqnot::{Receipt, ReqNot};
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TAtom, get_info};
|
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TypAtom, get_info};
|
||||||
use crate::context::ctx;
|
|
||||||
use crate::coroutine_exec::Replier;
|
use crate::coroutine_exec::Replier;
|
||||||
use crate::entrypoint::ExtReq;
|
use crate::entrypoint::ExtReq;
|
||||||
use crate::func_atom::{Fun, Lambda};
|
use crate::func_atom::{Fun, Lambda};
|
||||||
@@ -28,7 +32,7 @@ pub trait SystemCard: Default + Send + Sync + 'static {
|
|||||||
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>;
|
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait DynSystemCard: Send + Sync + Any + 'static {
|
pub trait DynSystemCard: Send + Sync + 'static {
|
||||||
fn name(&self) -> &'static str;
|
fn name(&self) -> &'static str;
|
||||||
/// Atoms explicitly defined by the system card. Do not rely on this for
|
/// Atoms explicitly defined by the system card. Do not rely on this for
|
||||||
/// querying atoms as it doesn't include the general atom types
|
/// querying atoms as it doesn't include the general atom types
|
||||||
@@ -67,7 +71,7 @@ pub async fn resolv_atom(
|
|||||||
sys: &(impl DynSystemCard + ?Sized),
|
sys: &(impl DynSystemCard + ?Sized),
|
||||||
atom: &api::Atom,
|
atom: &api::Atom,
|
||||||
) -> Box<dyn AtomDynfo> {
|
) -> Box<dyn AtomDynfo> {
|
||||||
let tid = AtomTypeId::decode(Pin::new(&mut &atom.data.0[..])).await;
|
let tid = AtomTypeId::decode(Pin::new(&mut &atom.data[..])).await;
|
||||||
atom_by_idx(sys, tid).expect("Value of nonexistent type found")
|
atom_by_idx(sys, tid).expect("Value of nonexistent type found")
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -80,16 +84,16 @@ impl<T: SystemCard> DynSystemCard for T {
|
|||||||
|
|
||||||
/// System as defined by author
|
/// System as defined by author
|
||||||
pub trait System: Send + Sync + SystemCard + 'static {
|
pub trait System: Send + Sync + SystemCard + 'static {
|
||||||
fn prelude() -> impl Future<Output = Vec<Sym>>;
|
fn prelude(i: &Interner) -> impl Future<Output = Vec<Sym>>;
|
||||||
fn env() -> impl Future<Output = Vec<GenMember>>;
|
fn env() -> Vec<GenMember>;
|
||||||
fn lexers() -> Vec<LexerObj>;
|
fn lexers() -> Vec<LexerObj>;
|
||||||
fn parsers() -> Vec<ParserObj>;
|
fn parsers() -> Vec<ParserObj>;
|
||||||
fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>;
|
fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
|
pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
|
||||||
fn dyn_prelude(&self) -> LocalBoxFuture<'_, Vec<Sym>>;
|
fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>>;
|
||||||
fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>>;
|
fn dyn_env(&'_ self) -> Vec<GenMember>;
|
||||||
fn dyn_lexers(&self) -> Vec<LexerObj>;
|
fn dyn_lexers(&self) -> Vec<LexerObj>;
|
||||||
fn dyn_parsers(&self) -> Vec<ParserObj>;
|
fn dyn_parsers(&self) -> Vec<ParserObj>;
|
||||||
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>;
|
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>;
|
||||||
@@ -97,8 +101,10 @@ pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<T: System> DynSystem for T {
|
impl<T: System> DynSystem for T {
|
||||||
fn dyn_prelude(&self) -> LocalBoxFuture<'_, Vec<Sym>> { Box::pin(Self::prelude()) }
|
fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>> {
|
||||||
fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>> { Self::env().boxed_local() }
|
Box::pin(Self::prelude(i))
|
||||||
|
}
|
||||||
|
fn dyn_env(&'_ self) -> Vec<GenMember> { Self::env() }
|
||||||
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
|
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
|
||||||
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
|
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
|
||||||
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> {
|
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> {
|
||||||
@@ -109,10 +115,10 @@ impl<T: System> DynSystem for T {
|
|||||||
fn card(&self) -> &dyn DynSystemCard { self }
|
fn card(&self) -> &dyn DynSystemCard { self }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TAtom<A>, ForeignAtom>
|
pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TypAtom<A>, ForeignAtom>
|
||||||
where A: AtomicFeatures {
|
where A: AtomicFeatures {
|
||||||
let mut data = &foreign.atom.data.0[..];
|
let mut data = &foreign.atom.data[..];
|
||||||
let ctx = ctx();
|
let ctx = foreign.ctx().clone();
|
||||||
let value = AtomTypeId::decode(Pin::new(&mut data)).await;
|
let value = AtomTypeId::decode(Pin::new(&mut data)).await;
|
||||||
let own_inst = ctx.get::<CtedObj>().inst();
|
let own_inst = ctx.get::<CtedObj>().inst();
|
||||||
let owner = if *ctx.get::<api::SysId>() == foreign.atom.owner {
|
let owner = if *ctx.get::<api::SysId>() == foreign.atom.owner {
|
||||||
@@ -122,30 +128,74 @@ where A: AtomicFeatures {
|
|||||||
.ok_or_else(|| foreign.clone())?
|
.ok_or_else(|| foreign.clone())?
|
||||||
.get_card()
|
.get_card()
|
||||||
};
|
};
|
||||||
if owner.atoms().flatten().all(|dynfo| dynfo.tid() != TypeId::of::<A>()) {
|
|
||||||
return Err(foreign);
|
|
||||||
}
|
|
||||||
let (typ_id, dynfo) = get_info::<A>(owner);
|
let (typ_id, dynfo) = get_info::<A>(owner);
|
||||||
if value != typ_id {
|
if value != typ_id {
|
||||||
return Err(foreign);
|
return Err(foreign);
|
||||||
}
|
}
|
||||||
let val = dynfo.decode(AtomCtx(data, foreign.atom.drop)).await;
|
let val = dynfo.decode(AtomCtx(data, foreign.atom.drop, ctx)).await;
|
||||||
let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type");
|
let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type");
|
||||||
Ok(TAtom { value, untyped: foreign })
|
Ok(TypAtom { value, untyped: foreign })
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn dep_req<Sys: SystemCard, Req: Request + Into<Sys::Req>>(req: Req) -> Req::Response {
|
#[derive(Clone)]
|
||||||
let ctx = ctx();
|
pub struct WeakSysCtx(Weak<MemoMap<TypeId, Box<dyn Any>>>);
|
||||||
let mut msg = Vec::new();
|
impl WeakSysCtx {
|
||||||
req.into().encode(std::pin::pin!(&mut msg)).await;
|
pub fn upgrade(&self) -> Option<SysCtx> { Some(SysCtx(self.0.upgrade()?)) }
|
||||||
let own_inst = ctx.get::<CtedObj>().inst();
|
|
||||||
let owner = if own_inst.card().type_id() == TypeId::of::<Sys>() {
|
|
||||||
ctx.sys_id()
|
|
||||||
} else {
|
|
||||||
(ctx.get::<CtedObj>().deps().find(|s| s.get_card().type_id() == TypeId::of::<Sys>()))
|
|
||||||
.expect("System not in dependency array")
|
|
||||||
.id()
|
|
||||||
};
|
|
||||||
let reply = ctx.reqnot().request(api::SysFwd(owner, msg)).await;
|
|
||||||
Req::Response::decode(std::pin::pin!(&reply[..])).await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
|
||||||
|
impl SysCtx {
|
||||||
|
pub fn new(
|
||||||
|
id: api::SysId,
|
||||||
|
i: Interner,
|
||||||
|
reqnot: ReqNot<api::ExtMsgSet>,
|
||||||
|
spawner: Spawner,
|
||||||
|
logger: Logger,
|
||||||
|
cted: CtedObj,
|
||||||
|
) -> Self {
|
||||||
|
let this = Self(Rc::new(MemoMap::new()));
|
||||||
|
this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted);
|
||||||
|
this
|
||||||
|
}
|
||||||
|
pub fn downgrade(&self) -> WeakSysCtx { WeakSysCtx(Rc::downgrade(&self.0)) }
|
||||||
|
pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self {
|
||||||
|
assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists");
|
||||||
|
self
|
||||||
|
}
|
||||||
|
pub fn get_or_insert<T: SysCtxEntry>(&self, f: impl FnOnce() -> T) -> &T {
|
||||||
|
(self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref())
|
||||||
|
.expect("Keyed by TypeId")
|
||||||
|
}
|
||||||
|
pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { self.get_or_insert(T::default) }
|
||||||
|
pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> {
|
||||||
|
Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId"))
|
||||||
|
}
|
||||||
|
pub fn get<T: SysCtxEntry>(&self) -> &T {
|
||||||
|
self.try_get().unwrap_or_else(|| panic!("Context {} missing", type_name::<T>()))
|
||||||
|
}
|
||||||
|
/// Shorthand to get the [Interner] instance
|
||||||
|
pub fn i(&self) -> &Interner { self.get::<Interner>() }
|
||||||
|
/// Shorthand to get the messaging link
|
||||||
|
pub fn reqnot(&self) -> &ReqNot<api::ExtMsgSet> { self.get::<ReqNot<api::ExtMsgSet>>() }
|
||||||
|
/// Shorthand to get the system ID
|
||||||
|
pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() }
|
||||||
|
/// Shorthand to get the task spawner callback
|
||||||
|
pub fn spawner(&self) -> &Spawner { self.get::<Spawner>() }
|
||||||
|
/// Shorthand to get the logger
|
||||||
|
pub fn logger(&self) -> &Logger { self.get::<Logger>() }
|
||||||
|
/// Shorthand to get the constructed system object
|
||||||
|
pub fn cted(&self) -> &CtedObj { self.get::<CtedObj>() }
|
||||||
|
}
|
||||||
|
impl fmt::Debug for SysCtx {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(f, "SysCtx({:?})", self.sys_id())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub trait SysCtxEntry: 'static + Sized {}
|
||||||
|
impl SysCtxEntry for api::SysId {}
|
||||||
|
impl SysCtxEntry for ReqNot<api::ExtMsgSet> {}
|
||||||
|
impl SysCtxEntry for Spawner {}
|
||||||
|
impl SysCtxEntry for CtedObj {}
|
||||||
|
impl SysCtxEntry for Logger {}
|
||||||
|
impl SysCtxEntry for Interner {}
|
||||||
|
|||||||
@@ -62,8 +62,6 @@ pub trait SystemCtor: Send + Sync + 'static {
|
|||||||
type Instance: System;
|
type Instance: System;
|
||||||
const NAME: &'static str;
|
const NAME: &'static str;
|
||||||
const VERSION: f64;
|
const VERSION: f64;
|
||||||
/// Create a system instance. When this function is called, a context object
|
|
||||||
/// isn't yet available
|
|
||||||
fn inst(deps: <Self::Deps as DepDef>::Sat) -> Self::Instance;
|
fn inst(deps: <Self::Deps as DepDef>::Sat) -> Self::Instance;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -9,24 +9,25 @@ use itertools::Itertools;
|
|||||||
use orchid_base::interner::{Interner, Tok};
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::location::SrcRange;
|
use orchid_base::location::SrcRange;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
|
use orchid_base::reqnot::ReqHandlish;
|
||||||
use orchid_base::tree::{TokTree, Token, TokenVariant};
|
use orchid_base::tree::{TokTree, Token, TokenVariant};
|
||||||
use substack::Substack;
|
use substack::Substack;
|
||||||
use trait_set::trait_set;
|
use trait_set::trait_set;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::context::i;
|
|
||||||
use crate::conv::ToExpr;
|
use crate::conv::ToExpr;
|
||||||
use crate::entrypoint::MemberRecord;
|
use crate::entrypoint::MemberRecord;
|
||||||
use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
|
use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
|
||||||
use crate::func_atom::{ExprFunc, Fun};
|
use crate::func_atom::{ExprFunc, Fun};
|
||||||
use crate::gen_expr::{GExpr, sym_ref};
|
use crate::gen_expr::{GExpr, sym_ref};
|
||||||
|
use crate::system::SysCtx;
|
||||||
|
|
||||||
pub type GenTokTree = TokTree<Expr, GExpr>;
|
pub type GenTokTree = TokTree<Expr, GExpr>;
|
||||||
pub type GenTok = Token<Expr, GExpr>;
|
pub type GenTok = Token<Expr, GExpr>;
|
||||||
|
|
||||||
impl TokenVariant<api::Expression> for GExpr {
|
impl TokenVariant<api::Expression> for GExpr {
|
||||||
type FromApiCtx<'a> = ();
|
type FromApiCtx<'a> = ();
|
||||||
type ToApiCtx<'a> = ();
|
type ToApiCtx<'a> = (SysCtx, &'a dyn ReqHandlish);
|
||||||
async fn from_api(
|
async fn from_api(
|
||||||
_: &api::Expression,
|
_: &api::Expression,
|
||||||
_: &mut Self::FromApiCtx<'_>,
|
_: &mut Self::FromApiCtx<'_>,
|
||||||
@@ -35,31 +36,33 @@ impl TokenVariant<api::Expression> for GExpr {
|
|||||||
) -> Self {
|
) -> Self {
|
||||||
panic!("Received new expression from host")
|
panic!("Received new expression from host")
|
||||||
}
|
}
|
||||||
async fn into_api(self, _: &mut Self::ToApiCtx<'_>) -> api::Expression { self.serialize().await }
|
async fn into_api(self, (ctx, hand): &mut Self::ToApiCtx<'_>) -> api::Expression {
|
||||||
|
self.api_return(ctx.clone(), hand).await
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TokenVariant<api::ExprTicket> for Expr {
|
impl TokenVariant<api::ExprTicket> for Expr {
|
||||||
type FromApiCtx<'a> = &'a BorrowedExprStore;
|
type FromApiCtx<'a> = (SysCtx, &'a BorrowedExprStore);
|
||||||
async fn from_api(
|
async fn from_api(
|
||||||
api: &api::ExprTicket,
|
api: &api::ExprTicket,
|
||||||
exprs: &mut Self::FromApiCtx<'_>,
|
(ctx, exprs): &mut Self::FromApiCtx<'_>,
|
||||||
_: SrcRange,
|
_: SrcRange,
|
||||||
_: &Interner,
|
_: &Interner,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
// SAFETY: receiving trees from sublexers implies borrowing
|
// SAFETY: receiving trees from sublexers implies borrowing
|
||||||
Expr::from_handle(ExprHandle::borrowed(*api, exprs))
|
Expr::from_handle(ExprHandle::borrowed(ctx.clone(), *api, exprs))
|
||||||
}
|
}
|
||||||
type ToApiCtx<'a> = ();
|
type ToApiCtx<'a> = ();
|
||||||
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::ExprTicket { self.handle().ticket() }
|
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::ExprTicket { self.handle().tk }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_gen().await) }
|
pub async fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_expr().await) }
|
||||||
pub async fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) }
|
pub async fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) }
|
||||||
|
|
||||||
pub fn lazy(
|
pub fn lazy(
|
||||||
public: bool,
|
public: bool,
|
||||||
name: &str,
|
name: &str,
|
||||||
cb: impl AsyncFnOnce(Sym) -> MemKind + Clone + 'static,
|
cb: impl AsyncFnOnce(Sym, SysCtx) -> MemKind + Clone + 'static,
|
||||||
) -> Vec<GenMember> {
|
) -> Vec<GenMember> {
|
||||||
vec![GenMember {
|
vec![GenMember {
|
||||||
name: name.to_string(),
|
name: name.to_string(),
|
||||||
@@ -69,7 +72,7 @@ pub fn lazy(
|
|||||||
}]
|
}]
|
||||||
}
|
}
|
||||||
pub fn cnst(public: bool, name: &str, value: impl ToExpr + Clone + 'static) -> Vec<GenMember> {
|
pub fn cnst(public: bool, name: &str, value: impl ToExpr + Clone + 'static) -> Vec<GenMember> {
|
||||||
lazy(public, name, async |_| MemKind::Const(value.to_gen().await))
|
lazy(public, name, async |_, _| MemKind::Const(value.to_expr().await))
|
||||||
}
|
}
|
||||||
pub fn module(
|
pub fn module(
|
||||||
public: bool,
|
public: bool,
|
||||||
@@ -84,8 +87,8 @@ pub fn root_mod(name: &str, mems: impl IntoIterator<Item = Vec<GenMember>>) -> (
|
|||||||
(name.to_string(), kind)
|
(name.to_string(), kind)
|
||||||
}
|
}
|
||||||
pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> {
|
pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> {
|
||||||
let fac = LazyMemberFactory::new(async move |sym| {
|
let fac = LazyMemberFactory::new(async move |sym, ctx| {
|
||||||
MemKind::Const(Fun::new(sym, xf).await.to_gen().await)
|
MemKind::Const(Fun::new(sym, ctx, xf).await.to_expr().await)
|
||||||
});
|
});
|
||||||
vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }]
|
vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }]
|
||||||
}
|
}
|
||||||
@@ -147,14 +150,14 @@ pub fn merge_trivial(trees: impl IntoIterator<Item = Vec<GenMember>>) -> Vec<Gen
|
|||||||
|
|
||||||
trait_set! {
|
trait_set! {
|
||||||
trait LazyMemberCallback =
|
trait LazyMemberCallback =
|
||||||
FnOnce(Sym) -> LocalBoxFuture<'static, MemKind> + DynClone
|
FnOnce(Sym, SysCtx) -> LocalBoxFuture<'static, MemKind> + DynClone
|
||||||
}
|
}
|
||||||
pub struct LazyMemberFactory(Box<dyn LazyMemberCallback>);
|
pub struct LazyMemberFactory(Box<dyn LazyMemberCallback>);
|
||||||
impl LazyMemberFactory {
|
impl LazyMemberFactory {
|
||||||
pub fn new(cb: impl AsyncFnOnce(Sym) -> MemKind + Clone + 'static) -> Self {
|
pub fn new(cb: impl AsyncFnOnce(Sym, SysCtx) -> MemKind + Clone + 'static) -> Self {
|
||||||
Self(Box::new(|s| cb(s).boxed_local()))
|
Self(Box::new(|s, ctx| cb(s, ctx).boxed_local()))
|
||||||
}
|
}
|
||||||
pub async fn build(self, path: Sym) -> MemKind { (self.0)(path).await }
|
pub async fn build(self, path: Sym, ctx: SysCtx) -> MemKind { (self.0)(path, ctx).await }
|
||||||
}
|
}
|
||||||
impl Clone for LazyMemberFactory {
|
impl Clone for LazyMemberFactory {
|
||||||
fn clone(&self) -> Self { Self(clone_box(&*self.0)) }
|
fn clone(&self) -> Self { Self(clone_box(&*self.0)) }
|
||||||
@@ -167,10 +170,11 @@ pub struct GenMember {
|
|||||||
pub comments: Vec<String>,
|
pub comments: Vec<String>,
|
||||||
}
|
}
|
||||||
impl GenMember {
|
impl GenMember {
|
||||||
pub async fn into_api(self, tia_cx: &mut impl TreeIntoApiCtx) -> api::Member {
|
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Member {
|
||||||
let name = i().i::<String>(&self.name).await;
|
let name = ctx.sys().i().i::<String>(&self.name).await;
|
||||||
let kind = self.kind.into_api(&mut tia_cx.push_path(name.clone())).await;
|
let kind = self.kind.into_api(&mut ctx.push_path(name.clone())).await;
|
||||||
let comments = join_all(self.comments.iter().map(async |cmt| i().i(cmt).await.to_api())).await;
|
let comments =
|
||||||
|
join_all(self.comments.iter().map(|cmt| async { ctx.sys().i().i(cmt).await.to_api() })).await;
|
||||||
api::Member { kind, name: name.to_api(), comments, exported: self.public }
|
api::Member { kind, name: name.to_api(), comments, exported: self.public }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -184,7 +188,7 @@ impl MemKind {
|
|||||||
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
|
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
|
||||||
match self {
|
match self {
|
||||||
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)),
|
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)),
|
||||||
Self::Const(c) => api::MemberKind::Const(c.serialize().await),
|
Self::Const(c) => api::MemberKind::Const(c.api_return(ctx.sys(), ctx.req()).await),
|
||||||
Self::Mod { members } => api::MemberKind::Module(api::Module {
|
Self::Mod { members } => api::MemberKind::Module(api::Module {
|
||||||
members: stream(async |mut cx| {
|
members: stream(async |mut cx| {
|
||||||
for m in members {
|
for m in members {
|
||||||
@@ -200,20 +204,27 @@ impl MemKind {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub trait TreeIntoApiCtx {
|
pub trait TreeIntoApiCtx {
|
||||||
|
fn sys(&self) -> SysCtx;
|
||||||
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId;
|
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId;
|
||||||
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx;
|
fn push_path(&mut self, seg: IStr) -> impl TreeIntoApiCtx;
|
||||||
|
fn req(&self) -> &impl ReqHandlish;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct TreeIntoApiCtxImpl<'a, 'b> {
|
pub struct TreeIntoApiCtxImpl<'a, 'b, RH: ReqHandlish> {
|
||||||
pub basepath: &'a [Tok<String>],
|
pub sys: SysCtx,
|
||||||
pub path: Substack<'a, Tok<String>>,
|
pub basepath: &'a [IStr],
|
||||||
|
pub path: Substack<'a, IStr>,
|
||||||
pub lazy_members: &'b mut HashMap<api::TreeId, MemberRecord>,
|
pub lazy_members: &'b mut HashMap<api::TreeId, MemberRecord>,
|
||||||
|
pub req: &'a RH,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_> {
|
impl<RH: ReqHandlish> TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_, RH> {
|
||||||
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx {
|
fn sys(&self) -> SysCtx { self.sys.clone() }
|
||||||
|
fn push_path(&mut self, seg: IStr) -> impl TreeIntoApiCtx {
|
||||||
TreeIntoApiCtxImpl {
|
TreeIntoApiCtxImpl {
|
||||||
|
req: self.req,
|
||||||
lazy_members: self.lazy_members,
|
lazy_members: self.lazy_members,
|
||||||
|
sys: self.sys.clone(),
|
||||||
basepath: self.basepath,
|
basepath: self.basepath,
|
||||||
path: self.path.push(seg),
|
path: self.path.push(seg),
|
||||||
}
|
}
|
||||||
@@ -224,4 +235,5 @@ impl TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_> {
|
|||||||
self.lazy_members.insert(id, MemberRecord::Gen(path, fac));
|
self.lazy_members.insert(id, MemberRecord::Gen(path, fac));
|
||||||
id
|
id
|
||||||
}
|
}
|
||||||
|
fn req(&self) -> &impl ReqHandlish { self.req }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,12 +7,12 @@ edition = "2024"
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
|
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
|
||||||
|
async-lock = "3.4.1"
|
||||||
async-once-cell = "0.5.4"
|
async-once-cell = "0.5.4"
|
||||||
async-process = "2.4.0"
|
async-process = "2.4.0"
|
||||||
bound = "0.6.0"
|
bound = "0.6.0"
|
||||||
derive_destructure = "1.0.0"
|
derive_destructure = "1.0.0"
|
||||||
futures = { version = "0.3.31", features = ["std"], default-features = false }
|
futures = { version = "0.3.31", features = ["std"], default-features = false }
|
||||||
futures-locks = "0.7.1"
|
|
||||||
hashbrown = "0.16.0"
|
hashbrown = "0.16.0"
|
||||||
itertools = "0.14.0"
|
itertools = "0.14.0"
|
||||||
lazy_static = "1.5.0"
|
lazy_static = "1.5.0"
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::rc::{Rc, Weak};
|
use std::rc::{Rc, Weak};
|
||||||
|
|
||||||
use async_once_cell::OnceCell;
|
use async_lock::OnceCell;
|
||||||
use derive_destructure::destructure;
|
use derive_destructure::destructure;
|
||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, take_first_fmt};
|
use orchid_base::format::{FmtCtx, FmtUnit, Format, take_first_fmt};
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
@@ -10,7 +10,7 @@ use orchid_base::tree::AtomRepr;
|
|||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::expr::{Expr, PathSetBuilder};
|
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
|
||||||
use crate::extension::Extension;
|
use crate::extension::Extension;
|
||||||
use crate::system::System;
|
use crate::system::System;
|
||||||
|
|
||||||
@@ -25,11 +25,11 @@ impl AtomData {
|
|||||||
#[must_use]
|
#[must_use]
|
||||||
fn api(self) -> api::Atom {
|
fn api(self) -> api::Atom {
|
||||||
let (owner, drop, data, _display) = self.destructure();
|
let (owner, drop, data, _display) = self.destructure();
|
||||||
api::Atom { data: api::AtomData(data), drop, owner: owner.id() }
|
api::Atom { data, drop, owner: owner.id() }
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
fn api_ref(&self) -> api::Atom {
|
fn api_ref(&self) -> api::Atom {
|
||||||
api::Atom { data: api::AtomData(self.data.clone()), drop: self.drop, owner: self.owner.id() }
|
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Drop for AtomData {
|
impl Drop for AtomData {
|
||||||
@@ -58,22 +58,22 @@ impl AtomHand {
|
|||||||
#[must_use]
|
#[must_use]
|
||||||
pub async fn call(self, arg: Expr) -> Expr {
|
pub async fn call(self, arg: Expr) -> Expr {
|
||||||
let owner_sys = self.0.owner.clone();
|
let owner_sys = self.0.owner.clone();
|
||||||
let ctx = owner_sys.ctx();
|
|
||||||
let reqnot = owner_sys.reqnot();
|
let reqnot = owner_sys.reqnot();
|
||||||
ctx.exprs.give_expr(arg.clone());
|
owner_sys.ext().exprs().give_expr(arg.clone());
|
||||||
let ret = match Rc::try_unwrap(self.0) {
|
let ret = match Rc::try_unwrap(self.0) {
|
||||||
Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await,
|
Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await,
|
||||||
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
|
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
|
||||||
};
|
};
|
||||||
let val = Expr::from_api(&ret, PathSetBuilder::new(), ctx.clone()).await;
|
let mut parse_ctx = ExprParseCtx { ctx: owner_sys.ctx(), exprs: owner_sys.ext().exprs() };
|
||||||
ctx.exprs.take_expr(arg.id());
|
let val = Expr::from_api(&ret, PathSetBuilder::new(), &mut parse_ctx).await;
|
||||||
|
owner_sys.ext().exprs().take_expr(arg.id());
|
||||||
val
|
val
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn sys(&self) -> &System { &self.0.owner }
|
pub fn sys(&self) -> &System { &self.0.owner }
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn ext(&self) -> &Extension { self.sys().ext() }
|
pub fn ext(&self) -> &Extension { self.sys().ext() }
|
||||||
pub async fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
|
pub async fn req(&self, key: api::TVec, req: Vec<u8>) -> Option<Vec<u8>> {
|
||||||
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req)).await
|
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req)).await
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
@@ -85,7 +85,7 @@ impl AtomHand {
|
|||||||
}
|
}
|
||||||
impl Format for AtomHand {
|
impl Format for AtomHand {
|
||||||
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
(self.0.display.get_or_init(async {
|
(self.0.display.get_or_init(|| async {
|
||||||
FmtUnit::from_api(&self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())).await)
|
FmtUnit::from_api(&self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())).await)
|
||||||
}))
|
}))
|
||||||
.await
|
.await
|
||||||
@@ -97,11 +97,7 @@ impl AtomRepr for AtomHand {
|
|||||||
async fn from_api(atom: &api::Atom, _: Pos, ctx: &mut Self::Ctx) -> Self {
|
async fn from_api(atom: &api::Atom, _: Pos, ctx: &mut Self::Ctx) -> Self {
|
||||||
let api::Atom { data, drop, owner } = atom.clone();
|
let api::Atom { data, drop, owner } = atom.clone();
|
||||||
let sys = ctx.system_inst(owner).await.expect("Dropped system created atom");
|
let sys = ctx.system_inst(owner).await.expect("Dropped system created atom");
|
||||||
if let Some(id) = drop {
|
if let Some(id) = drop { sys.new_atom(data, id).await } else { AtomHand::new(data, sys, drop) }
|
||||||
sys.new_atom(data.0, id).await
|
|
||||||
} else {
|
|
||||||
AtomHand::new(data.0, sys, drop)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
async fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
|
async fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ use std::num::{NonZero, NonZeroU16};
|
|||||||
use std::rc::{Rc, Weak};
|
use std::rc::{Rc, Weak};
|
||||||
use std::{fmt, ops};
|
use std::{fmt, ops};
|
||||||
|
|
||||||
use futures_locks::RwLock;
|
use async_lock::RwLock;
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use orchid_base::builtin::Spawner;
|
use orchid_base::builtin::Spawner;
|
||||||
use orchid_base::interner::Interner;
|
use orchid_base::interner::Interner;
|
||||||
@@ -18,7 +18,7 @@ pub struct CtxData {
|
|||||||
pub spawn: Spawner,
|
pub spawn: Spawner,
|
||||||
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
|
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
|
||||||
pub system_id: RefCell<NonZeroU16>,
|
pub system_id: RefCell<NonZeroU16>,
|
||||||
pub exprs: ExprStore,
|
pub common_exprs: ExprStore,
|
||||||
pub root: RwLock<WeakRoot>,
|
pub root: RwLock<WeakRoot>,
|
||||||
}
|
}
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
@@ -43,7 +43,7 @@ impl Ctx {
|
|||||||
i: Interner::default(),
|
i: Interner::default(),
|
||||||
systems: RwLock::default(),
|
systems: RwLock::default(),
|
||||||
system_id: RefCell::new(NonZero::new(1).unwrap()),
|
system_id: RefCell::new(NonZero::new(1).unwrap()),
|
||||||
exprs: ExprStore::default(),
|
common_exprs: ExprStore::default(),
|
||||||
root: RwLock::default(),
|
root: RwLock::default(),
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
use hashbrown::HashSet;
|
use hashbrown::HashSet;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_errv};
|
use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_err};
|
||||||
use orchid_base::interner::{Interner, Tok};
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
use orchid_base::name::VName;
|
use orchid_base::name::VName;
|
||||||
@@ -30,7 +30,7 @@ impl AbsPathError {
|
|||||||
format!("{path} is leading outside the root."),
|
format!("{path} is leading outside the root."),
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
mk_errv(descr, msg, [pos])
|
mk_err(descr, msg, [pos])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -42,21 +42,19 @@ impl AbsPathError {
|
|||||||
/// if the relative path contains as many or more `super` segments than the
|
/// if the relative path contains as many or more `super` segments than the
|
||||||
/// length of the absolute path.
|
/// length of the absolute path.
|
||||||
pub async fn absolute_path(
|
pub async fn absolute_path(
|
||||||
mut cwd: &[Tok<String>],
|
mut cwd: &[IStr],
|
||||||
mut rel: &[Tok<String>],
|
mut rel: &[IStr],
|
||||||
i: &Interner,
|
i: &Interner,
|
||||||
) -> Result<VName, AbsPathError> {
|
) -> Result<VName, AbsPathError> {
|
||||||
let i_self = i.i("self").await;
|
let i_self = i.i("self").await;
|
||||||
let i_super = i.i("super").await;
|
let i_super = i.i("super").await;
|
||||||
let mut relative = false;
|
let relative = rel.first().is_some_and(|s| *s != i_self && *s != i_super);
|
||||||
if let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_self) {
|
if let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h != i_self) {
|
||||||
rel = tail;
|
rel = tail;
|
||||||
relative = true;
|
|
||||||
} else {
|
} else {
|
||||||
while let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_super) {
|
while let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_super) {
|
||||||
cwd = cwd.split_last().ok_or(AbsPathError::TooManySupers)?.1;
|
cwd = cwd.split_last().ok_or(AbsPathError::TooManySupers)?.1;
|
||||||
rel = tail;
|
rel = tail;
|
||||||
relative = true;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if relative { VName::new(cwd.iter().chain(rel).cloned()) } else { VName::new(rel.to_vec()) }
|
if relative { VName::new(cwd.iter().chain(rel).cloned()) } else { VName::new(rel.to_vec()) }
|
||||||
@@ -69,13 +67,13 @@ pub struct DealiasCtx<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn resolv_glob<Mod: Tree>(
|
pub async fn resolv_glob<Mod: Tree>(
|
||||||
cwd: &[Tok<String>],
|
cwd: &[IStr],
|
||||||
root: &Mod,
|
root: &Mod,
|
||||||
abs_path: &[Tok<String>],
|
abs_path: &[IStr],
|
||||||
pos: Pos,
|
pos: Pos,
|
||||||
i: &Interner,
|
i: &Interner,
|
||||||
ctx: &mut Mod::Ctx<'_>,
|
ctx: &mut Mod::Ctx<'_>,
|
||||||
) -> OrcRes<HashSet<Tok<String>>> {
|
) -> OrcRes<HashSet<IStr>> {
|
||||||
let coprefix_len = cwd.iter().zip(abs_path).take_while(|(a, b)| a == b).count();
|
let coprefix_len = cwd.iter().zip(abs_path).take_while(|(a, b)| a == b).count();
|
||||||
let (co_prefix, diff_path) = abs_path.split_at(abs_path.len().min(coprefix_len + 1));
|
let (co_prefix, diff_path) = abs_path.split_at(abs_path.len().min(coprefix_len + 1));
|
||||||
let fst_diff =
|
let fst_diff =
|
||||||
@@ -89,7 +87,7 @@ pub async fn resolv_glob<Mod: Tree>(
|
|||||||
ChildErrorKind::Missing => ("Invalid import path", format!("{path} not found")),
|
ChildErrorKind::Missing => ("Invalid import path", format!("{path} not found")),
|
||||||
ChildErrorKind::Private => ("Import inaccessible", format!("{path} is private")),
|
ChildErrorKind::Private => ("Import inaccessible", format!("{path} is private")),
|
||||||
};
|
};
|
||||||
return Err(mk_errv(i.i(tk).await, msg, [pos]));
|
return Err(mk_err(i.i(tk).await, msg, [pos]));
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
Ok(target_module.children(coprefix_len < abs_path.len()))
|
Ok(target_module.children(coprefix_len < abs_path.len()))
|
||||||
@@ -100,11 +98,11 @@ pub type ChildResult<'a, T> = Result<&'a T, ChildErrorKind>;
|
|||||||
pub trait Tree {
|
pub trait Tree {
|
||||||
type Ctx<'a>;
|
type Ctx<'a>;
|
||||||
#[must_use]
|
#[must_use]
|
||||||
fn children(&self, public_only: bool) -> HashSet<Tok<String>>;
|
fn children(&self, public_only: bool) -> HashSet<IStr>;
|
||||||
#[must_use]
|
#[must_use]
|
||||||
fn child(
|
fn child(
|
||||||
&self,
|
&self,
|
||||||
key: Tok<String>,
|
key: IStr,
|
||||||
public_only: bool,
|
public_only: bool,
|
||||||
ctx: &mut Self::Ctx<'_>,
|
ctx: &mut Self::Ctx<'_>,
|
||||||
) -> impl Future<Output = ChildResult<'_, Self>>;
|
) -> impl Future<Output = ChildResult<'_, Self>>;
|
||||||
@@ -135,7 +133,7 @@ pub struct ChildError {
|
|||||||
pub async fn walk<'a, T: Tree>(
|
pub async fn walk<'a, T: Tree>(
|
||||||
root: &'a T,
|
root: &'a T,
|
||||||
public_only: bool,
|
public_only: bool,
|
||||||
path: impl IntoIterator<Item = Tok<String>>,
|
path: impl IntoIterator<Item = IStr>,
|
||||||
ctx: &mut T::Ctx<'_>,
|
ctx: &mut T::Ctx<'_>,
|
||||||
) -> Result<&'a T, ChildError> {
|
) -> Result<&'a T, ChildError> {
|
||||||
let mut cur = root;
|
let mut cur = root;
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
use std::mem;
|
use std::mem;
|
||||||
|
|
||||||
|
use async_lock::RwLockWriteGuard;
|
||||||
use bound::Bound;
|
use bound::Bound;
|
||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use futures_locks::{RwLockWriteGuard, TryLockError};
|
|
||||||
use orchid_base::error::OrcErrv;
|
use orchid_base::error::OrcErrv;
|
||||||
use orchid_base::format::{FmtCtxImpl, Format, take_first};
|
use orchid_base::format::{FmtCtxImpl, Format, take_first};
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
@@ -12,7 +12,7 @@ use crate::ctx::Ctx;
|
|||||||
use crate::expr::{Expr, ExprKind, PathSet, Step};
|
use crate::expr::{Expr, ExprKind, PathSet, Step};
|
||||||
use crate::tree::Root;
|
use crate::tree::Root;
|
||||||
|
|
||||||
type ExprGuard = Bound<RwLockWriteGuard<ExprKind>, Expr>;
|
type ExprGuard = Bound<RwLockWriteGuard<'static, ExprKind>, Expr>;
|
||||||
|
|
||||||
/// The stack operation associated with a transform
|
/// The stack operation associated with a transform
|
||||||
enum StackOp {
|
enum StackOp {
|
||||||
@@ -76,13 +76,13 @@ impl ExecCtx {
|
|||||||
#[must_use]
|
#[must_use]
|
||||||
pub async fn unpack_ident(&self, ex: &Expr) -> Expr {
|
pub async fn unpack_ident(&self, ex: &Expr) -> Expr {
|
||||||
match ex.kind().try_write().as_deref_mut() {
|
match ex.kind().try_write().as_deref_mut() {
|
||||||
Ok(ExprKind::Identity(ex)) => {
|
Some(ExprKind::Identity(ex)) => {
|
||||||
let val = self.unpack_ident(ex).boxed_local().await;
|
let val = self.unpack_ident(ex).boxed_local().await;
|
||||||
*ex = val.clone();
|
*ex = val.clone();
|
||||||
val
|
val
|
||||||
},
|
},
|
||||||
Ok(_) => ex.clone(),
|
Some(_) => ex.clone(),
|
||||||
Err(TryLockError) => panic!("Cycle encountered!"),
|
None => panic!("Cycle encountered!"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub async fn execute(&mut self) {
|
pub async fn execute(&mut self) {
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ use std::num::NonZeroU64;
|
|||||||
use std::rc::{Rc, Weak};
|
use std::rc::{Rc, Weak};
|
||||||
use std::{fmt, mem};
|
use std::{fmt, mem};
|
||||||
|
|
||||||
|
use async_lock::RwLock;
|
||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use futures_locks::RwLock;
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::OrcErrv;
|
use orchid_base::error::OrcErrv;
|
||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||||
@@ -21,6 +21,12 @@ use crate::atom::AtomHand;
|
|||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::expr_store::ExprStore;
|
use crate::expr_store::ExprStore;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct ExprParseCtx<'a> {
|
||||||
|
pub ctx: &'a Ctx,
|
||||||
|
pub exprs: &'a ExprStore,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct ExprData {
|
pub struct ExprData {
|
||||||
pos: Pos,
|
pos: Pos,
|
||||||
@@ -35,9 +41,9 @@ impl Expr {
|
|||||||
pub async fn try_into_owned_atom(self) -> Result<AtomHand, Self> {
|
pub async fn try_into_owned_atom(self) -> Result<AtomHand, Self> {
|
||||||
match Rc::try_unwrap(self.0) {
|
match Rc::try_unwrap(self.0) {
|
||||||
Err(e) => Err(Self(e)),
|
Err(e) => Err(Self(e)),
|
||||||
Ok(data) => match data.kind.try_unwrap().expect("This fields shouldn't be copied") {
|
Ok(data) => match data.kind.into_inner() {
|
||||||
ExprKind::Atom(a) => Ok(a),
|
ExprKind::Atom(a) => Ok(a),
|
||||||
inner => Err(Self(Rc::new(ExprData { kind: RwLock::new(inner), pos: data.pos }))),
|
inner => Err(Self(Rc::new(ExprData { kind: inner.into(), pos: data.pos }))),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -55,34 +61,39 @@ impl Expr {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub async fn from_api(api: &api::Expression, psb: PathSetBuilder<'_, u64>, ctx: Ctx) -> Self {
|
pub async fn from_api(
|
||||||
let pos = Pos::from_api(&api.location, &ctx.i).await;
|
api: &api::Expression,
|
||||||
|
psb: PathSetBuilder<'_, u64>,
|
||||||
|
ctx: &mut ExprParseCtx<'_>,
|
||||||
|
) -> Self {
|
||||||
|
let pos = Pos::from_api(&api.location, &ctx.ctx.i).await;
|
||||||
let kind = match &api.kind {
|
let kind = match &api.kind {
|
||||||
api::ExpressionKind::Arg(n) => {
|
api::ExpressionKind::Arg(n) => {
|
||||||
assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda");
|
assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda");
|
||||||
ExprKind::Arg
|
ExprKind::Arg
|
||||||
},
|
},
|
||||||
api::ExpressionKind::Bottom(bot) => ExprKind::Bottom(OrcErrv::from_api(bot, &ctx.i).await),
|
api::ExpressionKind::Bottom(bot) =>
|
||||||
|
ExprKind::Bottom(OrcErrv::from_api(bot, &ctx.ctx.i).await),
|
||||||
api::ExpressionKind::Call(f, x) => {
|
api::ExpressionKind::Call(f, x) => {
|
||||||
let (lpsb, rpsb) = psb.split();
|
let (lpsb, rpsb) = psb.split();
|
||||||
ExprKind::Call(
|
ExprKind::Call(
|
||||||
Expr::from_api(f, lpsb, ctx.clone()).boxed_local().await,
|
Expr::from_api(f, lpsb, ctx).boxed_local().await,
|
||||||
Expr::from_api(x, rpsb, ctx).boxed_local().await,
|
Expr::from_api(x, rpsb, ctx).boxed_local().await,
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.i).await),
|
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.ctx.i).await),
|
||||||
api::ExpressionKind::Lambda(x, body) => {
|
api::ExpressionKind::Lambda(x, body) => {
|
||||||
let lbuilder = psb.lambda(x);
|
let lbuilder = psb.lambda(x);
|
||||||
let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await;
|
let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await;
|
||||||
ExprKind::Lambda(lbuilder.collect(), body)
|
ExprKind::Lambda(lbuilder.collect(), body)
|
||||||
},
|
},
|
||||||
api::ExpressionKind::NewAtom(a) =>
|
api::ExpressionKind::NewAtom(a) =>
|
||||||
ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.clone()).await),
|
ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.ctx.clone()).await),
|
||||||
api::ExpressionKind::Slot(tk) => return ctx.exprs.take_expr(*tk).expect("Invalid slot"),
|
api::ExpressionKind::Slot(tk) => return ctx.exprs.get_expr(*tk).expect("Invalid slot"),
|
||||||
api::ExpressionKind::Seq(a, b) => {
|
api::ExpressionKind::Seq(a, b) => {
|
||||||
let (apsb, bpsb) = psb.split();
|
let (apsb, bpsb) = psb.split();
|
||||||
ExprKind::Seq(
|
ExprKind::Seq(
|
||||||
Expr::from_api(a, apsb, ctx.clone()).boxed_local().await,
|
Expr::from_api(a, apsb, ctx).boxed_local().await,
|
||||||
Expr::from_api(b, bpsb, ctx).boxed_local().await,
|
Expr::from_api(b, bpsb, ctx).boxed_local().await,
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
@@ -155,8 +166,8 @@ async fn print_exprkind<'a>(
|
|||||||
ExprKind::Bottom(e) if e.len() == 1 => format!("Bottom({e})").into(),
|
ExprKind::Bottom(e) if e.len() == 1 => format!("Bottom({e})").into(),
|
||||||
ExprKind::Bottom(e) => format!("Bottom(\n\t{}\n)", indent(&e.to_string())).into(),
|
ExprKind::Bottom(e) => format!("Bottom(\n\t{}\n)", indent(&e.to_string())).into(),
|
||||||
ExprKind::Call(f, x) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
ExprKind::Call(f, x) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
||||||
.unbounded("{0b} {1l}")
|
.unbounded("{0} {1l}")
|
||||||
.bounded("({0b} {1})")))
|
.bounded("({0} {1b})")))
|
||||||
.units([print_expr(f, c, visited).await, print_expr(x, c, visited).await]),
|
.units([print_expr(f, c, visited).await, print_expr(x, c, visited).await]),
|
||||||
ExprKind::Identity(id) =>
|
ExprKind::Identity(id) =>
|
||||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{{{0}}}"))).units([print_expr(
|
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{{{0}}}"))).units([print_expr(
|
||||||
@@ -166,11 +177,11 @@ async fn print_exprkind<'a>(
|
|||||||
.await]),
|
.await]),
|
||||||
ExprKind::Const(c) => format!("{c}").into(),
|
ExprKind::Const(c) => format!("{c}").into(),
|
||||||
ExprKind::Lambda(None, body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
ExprKind::Lambda(None, body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
||||||
// .unbounded("\\.{0l}")
|
.unbounded("\\.{0l}")
|
||||||
.bounded("(\\.{0b})")))
|
.bounded("(\\.{0b})")))
|
||||||
.units([print_expr(body, c, visited).await]),
|
.units([print_expr(body, c, visited).await]),
|
||||||
ExprKind::Lambda(Some(path), body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
ExprKind::Lambda(Some(path), body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
||||||
// .unbounded("\\{0b}. {1l}")
|
.unbounded("\\{0b}. {1l}")
|
||||||
.bounded("(\\{0b}. {1b})")))
|
.bounded("(\\{0b}. {1b})")))
|
||||||
.units([format!("{path}").into(), print_expr(body, c, visited).await]),
|
.units([format!("{path}").into(), print_expr(body, c, visited).await]),
|
||||||
ExprKind::Seq(l, r) =>
|
ExprKind::Seq(l, r) =>
|
||||||
@@ -347,14 +358,14 @@ impl TokenVariant<api::ExprTicket> for Expr {
|
|||||||
pub struct ExprWillPanic;
|
pub struct ExprWillPanic;
|
||||||
|
|
||||||
impl TokenVariant<api::Expression> for Expr {
|
impl TokenVariant<api::Expression> for Expr {
|
||||||
type FromApiCtx<'a> = Ctx;
|
type FromApiCtx<'a> = ExprParseCtx<'a>;
|
||||||
async fn from_api(
|
async fn from_api(
|
||||||
api: &api::Expression,
|
api: &api::Expression,
|
||||||
ctx: &mut Self::FromApiCtx<'_>,
|
ctx: &mut Self::FromApiCtx<'_>,
|
||||||
_: SrcRange,
|
_: SrcRange,
|
||||||
_: &Interner,
|
_: &Interner,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self::from_api(api, PathSetBuilder::new(), ctx.clone()).await
|
Self::from_api(api, PathSetBuilder::new(), ctx).await
|
||||||
}
|
}
|
||||||
type ToApiCtx<'a> = ExprWillPanic;
|
type ToApiCtx<'a> = ExprWillPanic;
|
||||||
async fn into_api(self, ExprWillPanic: &mut Self::ToApiCtx<'_>) -> api::Expression {
|
async fn into_api(self, ExprWillPanic: &mut Self::ToApiCtx<'_>) -> api::Expression {
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ use std::cell::RefCell;
|
|||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use bound::Bound;
|
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use hashbrown::hash_map::Entry;
|
use hashbrown::hash_map::Entry;
|
||||||
|
|
||||||
@@ -17,20 +16,11 @@ pub struct ExprStoreData {
|
|||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct ExprStore(Rc<ExprStoreData>);
|
pub struct ExprStore(Rc<ExprStoreData>);
|
||||||
impl ExprStore {
|
impl ExprStore {
|
||||||
/// If tracking_parent is false, get_expr can fall back to the parent if none
|
|
||||||
/// is found here.
|
|
||||||
///
|
|
||||||
/// If tracking_parent is true, get_expr can still fall back to the parent,
|
|
||||||
/// but operations on the parent can access the child exprs too until this
|
|
||||||
/// store is dropped.
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn derive(&self) -> Self {
|
pub fn derive(&self) -> Self {
|
||||||
Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) }))
|
Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) }))
|
||||||
}
|
}
|
||||||
pub fn give_expr(&self, expr: Expr) {
|
pub fn give_expr(&self, expr: Expr) {
|
||||||
if let Some(parent) = self.0.parent.as_ref() {
|
|
||||||
parent.give_expr(expr.clone())
|
|
||||||
}
|
|
||||||
match self.0.exprs.borrow_mut().entry(expr.id()) {
|
match self.0.exprs.borrow_mut().entry(expr.id()) {
|
||||||
Entry::Occupied(mut oe) => oe.get_mut().0 += 1,
|
Entry::Occupied(mut oe) => oe.get_mut().0 += 1,
|
||||||
Entry::Vacant(v) => {
|
Entry::Vacant(v) => {
|
||||||
@@ -39,11 +29,8 @@ impl ExprStore {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn take_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
|
pub fn take_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
|
||||||
if let Some(parent) = self.0.parent.as_ref() {
|
|
||||||
parent.take_expr(ticket);
|
|
||||||
}
|
|
||||||
match self.0.exprs.borrow_mut().entry(ticket) {
|
match self.0.exprs.borrow_mut().entry(ticket) {
|
||||||
Entry::Vacant(_) => panic!("Attempted to double-take expression"),
|
Entry::Vacant(_) => None,
|
||||||
Entry::Occupied(oe) if oe.get().0 == 1 => Some(oe.remove().1),
|
Entry::Occupied(oe) if oe.get().0 == 1 => Some(oe.remove().1),
|
||||||
Entry::Occupied(mut oe) => {
|
Entry::Occupied(mut oe) => {
|
||||||
oe.get_mut().0 -= 1;
|
oe.get_mut().0 -= 1;
|
||||||
@@ -56,11 +43,6 @@ impl ExprStore {
|
|||||||
(self.0.exprs.borrow().get(&ticket).map(|(_, expr)| expr.clone()))
|
(self.0.exprs.borrow().get(&ticket).map(|(_, expr)| expr.clone()))
|
||||||
.or_else(|| self.0.parent.as_ref()?.get_expr(ticket))
|
.or_else(|| self.0.parent.as_ref()?.get_expr(ticket))
|
||||||
}
|
}
|
||||||
pub fn iter(&self) -> impl Iterator<Item = (u32, Expr)> {
|
|
||||||
let r = Bound::new(self.clone(), |this| this.0.exprs.borrow());
|
|
||||||
let mut iter = Bound::new(r, |r| r.values());
|
|
||||||
std::iter::from_fn(move || iter.wrapped_mut().next().cloned())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
impl fmt::Display for ExprStore {
|
impl fmt::Display for ExprStore {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
@@ -69,17 +51,3 @@ impl fmt::Display for ExprStore {
|
|||||||
write!(f, "Store holding {rc} refs to {} exprs", r.len())
|
write!(f, "Store holding {rc} refs to {} exprs", r.len())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Drop for ExprStore {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
if 1 < Rc::strong_count(&self.0) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if let Some(parent) = self.0.parent.as_ref() {
|
|
||||||
for (id, (count, _)) in self.0.exprs.borrow().iter() {
|
|
||||||
for _ in 0..*count {
|
|
||||||
parent.take_expr(*id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ use crate::api;
|
|||||||
use crate::atom::AtomHand;
|
use crate::atom::AtomHand;
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::dealias::{ChildError, ChildErrorKind, walk};
|
use crate::dealias::{ChildError, ChildErrorKind, walk};
|
||||||
use crate::expr::{Expr, PathSetBuilder};
|
use crate::expr_store::ExprStore;
|
||||||
use crate::system::SystemCtor;
|
use crate::system::SystemCtor;
|
||||||
use crate::tree::MemberKind;
|
use crate::tree::MemberKind;
|
||||||
|
|
||||||
@@ -40,12 +40,12 @@ pub struct ReqPair<R: Request>(R, Sender<R::Response>);
|
|||||||
/// upgrading fails.
|
/// upgrading fails.
|
||||||
#[derive(destructure)]
|
#[derive(destructure)]
|
||||||
pub struct ExtensionData {
|
pub struct ExtensionData {
|
||||||
name: String,
|
|
||||||
ctx: Ctx,
|
ctx: Ctx,
|
||||||
reqnot: ReqNot<api::HostMsgSet>,
|
reqnot: ReqNot<api::HostMsgSet>,
|
||||||
systems: Vec<SystemCtor>,
|
systems: Vec<SystemCtor>,
|
||||||
logger: Logger,
|
logger: Logger,
|
||||||
next_pars: RefCell<NonZeroU64>,
|
next_pars: RefCell<NonZeroU64>,
|
||||||
|
exprs: ExprStore,
|
||||||
exiting_snd: Sender<()>,
|
exiting_snd: Sender<()>,
|
||||||
lex_recur: Mutex<HashMap<api::ParsId, Sender<ReqPair<api::SubLex>>>>,
|
lex_recur: Mutex<HashMap<api::ParsId, Sender<ReqPair<api::SubLex>>>>,
|
||||||
}
|
}
|
||||||
@@ -55,7 +55,6 @@ impl Drop for ExtensionData {
|
|||||||
let mut exiting_snd = self.exiting_snd.clone();
|
let mut exiting_snd = self.exiting_snd.clone();
|
||||||
(self.ctx.spawn)(Box::pin(async move {
|
(self.ctx.spawn)(Box::pin(async move {
|
||||||
reqnot.notify(api::HostExtNotif::Exit).await;
|
reqnot.notify(api::HostExtNotif::Exit).await;
|
||||||
|
|
||||||
exiting_snd.send(()).await.unwrap()
|
exiting_snd.send(()).await.unwrap()
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
@@ -68,14 +67,8 @@ impl Extension {
|
|||||||
Ok(Self(Rc::new_cyclic(|weak: &Weak<ExtensionData>| {
|
Ok(Self(Rc::new_cyclic(|weak: &Weak<ExtensionData>| {
|
||||||
let init = Rc::new(init);
|
let init = Rc::new(init);
|
||||||
let (exiting_snd, exiting_rcv) = channel::<()>(0);
|
let (exiting_snd, exiting_rcv) = channel::<()>(0);
|
||||||
(ctx.spawn)({
|
(ctx.spawn)(clone!(init, weak, ctx; Box::pin(async move {
|
||||||
clone!(init, weak, ctx);
|
let rcv_stream = stream(async |mut cx| loop { cx.emit( init.recv().await).await });
|
||||||
Box::pin(async move {
|
|
||||||
let rcv_stream = stream(async |mut cx| {
|
|
||||||
loop {
|
|
||||||
cx.emit(init.recv().await).await
|
|
||||||
}
|
|
||||||
});
|
|
||||||
let mut event_stream = pin!(stream::select(exiting_rcv.map(|()| None), rcv_stream));
|
let mut event_stream = pin!(stream::select(exiting_rcv.map(|()| None), rcv_stream));
|
||||||
while let Some(Some(msg)) = event_stream.next().await {
|
while let Some(Some(msg)) = event_stream.next().await {
|
||||||
if let Some(reqnot) = weak.upgrade().map(|rc| rc.reqnot.clone()) {
|
if let Some(reqnot) = weak.upgrade().map(|rc| rc.reqnot.clone()) {
|
||||||
@@ -85,11 +78,10 @@ impl Extension {
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})));
|
||||||
});
|
|
||||||
ExtensionData {
|
ExtensionData {
|
||||||
name: init.name.clone(),
|
|
||||||
exiting_snd,
|
exiting_snd,
|
||||||
|
exprs: ctx.common_exprs.derive(),
|
||||||
ctx: ctx.clone(),
|
ctx: ctx.clone(),
|
||||||
systems: (init.systems.iter().cloned())
|
systems: (init.systems.iter().cloned())
|
||||||
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
|
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
|
||||||
@@ -103,20 +95,21 @@ impl Extension {
|
|||||||
clone!(weak; move |notif, _| {
|
clone!(weak; move |notif, _| {
|
||||||
clone!(weak; Box::pin(async move {
|
clone!(weak; Box::pin(async move {
|
||||||
let this = Extension(weak.upgrade().unwrap());
|
let this = Extension(weak.upgrade().unwrap());
|
||||||
if !matches!(notif, api::ExtHostNotif::Log(_)) {
|
|
||||||
writeln!(this.reqnot().logger(), "Host received notif {notif:?}");
|
|
||||||
}
|
|
||||||
match notif {
|
match notif {
|
||||||
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => {
|
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => {
|
||||||
let target = this.0.ctx.exprs.get_expr(acq.1).expect("Invalid ticket");
|
let target = this.0.exprs.get_expr(acq.1).expect("Invalid ticket");
|
||||||
this.0.ctx.exprs.give_expr(target)
|
this.0.exprs.give_expr(target)
|
||||||
}
|
}
|
||||||
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => {
|
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => {
|
||||||
if this.is_own_sys(rel.0).await {
|
this.assert_own_sys(rel.0).await;
|
||||||
this.0.ctx.exprs.take_expr(rel.1);
|
this.0.exprs.take_expr(rel.1);
|
||||||
} else {
|
|
||||||
writeln!(this.reqnot().logger(), "Not our system {:?}", rel.0)
|
|
||||||
}
|
}
|
||||||
|
api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => {
|
||||||
|
this.assert_own_sys(mov.dec).await;
|
||||||
|
let recp = this.ctx().system_inst(mov.inc).await.expect("invallid recipient sys id");
|
||||||
|
let expr = this.0.exprs.get_expr(mov.expr).expect("invalid ticket");
|
||||||
|
recp.ext().0.exprs.give_expr(expr);
|
||||||
|
this.0.exprs.take_expr(mov.expr);
|
||||||
},
|
},
|
||||||
api::ExtHostNotif::Log(api::Log(str)) => this.logger().log(str),
|
api::ExtHostNotif::Log(api::Log(str)) => this.logger().log(str),
|
||||||
}
|
}
|
||||||
@@ -127,9 +120,7 @@ impl Extension {
|
|||||||
clone!(weak, ctx);
|
clone!(weak, ctx);
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let this = Self(weak.upgrade().unwrap());
|
let this = Self(weak.upgrade().unwrap());
|
||||||
if !matches!(req, api::ExtHostReq::ExtAtomPrint(_)) {
|
|
||||||
writeln!(this.reqnot().logger(), "Host received request {req:?}");
|
writeln!(this.reqnot().logger(), "Host received request {req:?}");
|
||||||
}
|
|
||||||
let i = this.ctx().i.clone();
|
let i = this.ctx().i.clone();
|
||||||
match req {
|
match req {
|
||||||
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()).await,
|
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()).await,
|
||||||
@@ -167,9 +158,10 @@ impl Extension {
|
|||||||
}
|
}
|
||||||
hand.handle(&sl, &rep_out.next().await.unwrap()).await
|
hand.handle(&sl, &rep_out.next().await.unwrap()).await
|
||||||
},
|
},
|
||||||
api::ExtHostReq::ExprReq(expr_req) => match expr_req {
|
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(
|
||||||
api::ExprReq::Inspect(ins @ api::Inspect { target }) => {
|
ins @ api::Inspect { target },
|
||||||
let expr = ctx.exprs.get_expr(target).expect("Invalid ticket");
|
)) => {
|
||||||
|
let expr = this.exprs().get_expr(target).expect("Invalid ticket");
|
||||||
hand
|
hand
|
||||||
.handle(&ins, &api::Inspected {
|
.handle(&ins, &api::Inspected {
|
||||||
refcount: expr.strong_count() as u32,
|
refcount: expr.strong_count() as u32,
|
||||||
@@ -178,13 +170,6 @@ impl Extension {
|
|||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
},
|
},
|
||||||
api::ExprReq::Create(ref cre @ api::Create(ref expr)) => {
|
|
||||||
let expr = Expr::from_api(expr, PathSetBuilder::new(), ctx.clone()).await;
|
|
||||||
let expr_id = expr.id();
|
|
||||||
ctx.exprs.give_expr(expr);
|
|
||||||
hand.handle(cre, &expr_id).await
|
|
||||||
},
|
|
||||||
},
|
|
||||||
api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => {
|
api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => {
|
||||||
let reply: <api::LsModule as Request>::Response = 'reply: {
|
let reply: <api::LsModule as Request>::Response = 'reply: {
|
||||||
let path = i.ex(path).await;
|
let path = i.ex(path).await;
|
||||||
@@ -250,22 +235,23 @@ impl Extension {
|
|||||||
}
|
}
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
pub fn name(&self) -> &String { &self.0.name }
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.reqnot }
|
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.reqnot }
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn logger(&self) -> &Logger { &self.0.logger }
|
pub fn logger(&self) -> &Logger { &self.0.logger }
|
||||||
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
|
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
|
||||||
#[must_use]
|
#[must_use]
|
||||||
|
pub fn exprs(&self) -> &ExprStore { &self.0.exprs }
|
||||||
|
#[must_use]
|
||||||
pub async fn is_own_sys(&self, id: api::SysId) -> bool {
|
pub async fn is_own_sys(&self, id: api::SysId) -> bool {
|
||||||
let Some(sys) = self.ctx().system_inst(id).await else {
|
let sys = self.ctx().system_inst(id).await.expect("invalid sender sys id");
|
||||||
writeln!(self.logger(), "Invalid system ID {id:?}");
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
Rc::ptr_eq(&self.0, &sys.ext().0)
|
Rc::ptr_eq(&self.0, &sys.ext().0)
|
||||||
}
|
}
|
||||||
|
pub async fn assert_own_sys(&self, id: api::SysId) {
|
||||||
|
assert!(self.is_own_sys(id).await, "Incoming message impersonates separate system");
|
||||||
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn next_pars(&self) -> NonZeroU64 {
|
pub fn next_pars(&self) -> NonZeroU64 {
|
||||||
let mut next_pars = self.0.next_pars.borrow_mut();
|
let mut next_pars = self.0.next_pars.borrow_mut();
|
||||||
@@ -274,7 +260,7 @@ impl Extension {
|
|||||||
}
|
}
|
||||||
pub(crate) async fn lex_req<F: Future<Output = Option<api::SubLexed>>>(
|
pub(crate) async fn lex_req<F: Future<Output = Option<api::SubLexed>>>(
|
||||||
&self,
|
&self,
|
||||||
source: Tok<String>,
|
source: IStr,
|
||||||
src: Sym,
|
src: Sym,
|
||||||
pos: u32,
|
pos: u32,
|
||||||
sys: api::SysId,
|
sys: api::SysId,
|
||||||
@@ -307,7 +293,7 @@ impl Extension {
|
|||||||
pub fn system_drop(&self, id: api::SysId) {
|
pub fn system_drop(&self, id: api::SysId) {
|
||||||
let rc = self.clone();
|
let rc = self.clone();
|
||||||
(self.ctx().spawn)(Box::pin(async move {
|
(self.ctx().spawn)(Box::pin(async move {
|
||||||
rc.reqnot().request(api::SystemDrop(id)).await;
|
rc.reqnot().notify(api::SystemDrop(id)).await;
|
||||||
rc.ctx().systems.write().await.remove(&id);
|
rc.ctx().systems.write().await.remove(&id);
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,25 +2,22 @@ use std::rc::Rc;
|
|||||||
|
|
||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use futures::lock::Mutex;
|
use futures::lock::Mutex;
|
||||||
use orchid_base::clone;
|
use orchid_base::error::{OrcErrv, OrcRes, mk_err};
|
||||||
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
|
|
||||||
use orchid_base::interner::Tok;
|
use orchid_base::interner::Tok;
|
||||||
use orchid_base::location::SrcRange;
|
use orchid_base::location::SrcRange;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
|
use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
|
||||||
use orchid_base::tokens::PARENS;
|
use orchid_base::tokens::PARENS;
|
||||||
use orchid_base::tree::recur;
|
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::expr::Expr;
|
use crate::expr::{Expr, ExprParseCtx};
|
||||||
use crate::expr_store::ExprStore;
|
|
||||||
use crate::parsed::{ParsTok, ParsTokTree, tt_to_api};
|
use crate::parsed::{ParsTok, ParsTokTree, tt_to_api};
|
||||||
use crate::system::System;
|
use crate::system::System;
|
||||||
|
|
||||||
pub struct LexCtx<'a> {
|
pub struct LexCtx<'a> {
|
||||||
pub systems: &'a [System],
|
pub systems: &'a [System],
|
||||||
pub source: &'a Tok<String>,
|
pub source: &'a IStr,
|
||||||
pub path: &'a Sym,
|
pub path: &'a Sym,
|
||||||
pub tail: &'a str,
|
pub tail: &'a str,
|
||||||
pub sub_trees: &'a mut Vec<Expr>,
|
pub sub_trees: &'a mut Vec<Expr>,
|
||||||
@@ -55,12 +52,19 @@ impl<'a> LexCtx<'a> {
|
|||||||
false
|
false
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub async fn ser_subtree(&mut self, subtree: ParsTokTree, exprs: ExprStore) -> api::TokenTree {
|
pub async fn ser_subtree(&mut self, subtree: ParsTokTree) -> api::TokenTree {
|
||||||
tt_to_api(&mut { exprs }, subtree).await
|
tt_to_api(&mut self.ctx.common_exprs.clone(), subtree).await
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub async fn des_subtree(&mut self, tree: &api::TokenTree, exprs: ExprStore) -> ParsTokTree {
|
pub async fn des_subtree(&mut self, tree: &api::TokenTree) -> ParsTokTree {
|
||||||
ParsTokTree::from_api(tree, &mut { exprs }, &mut self.ctx.clone(), self.path, &self.ctx.i).await
|
ParsTokTree::from_api(
|
||||||
|
tree,
|
||||||
|
&mut self.ctx.common_exprs.clone(),
|
||||||
|
&mut ExprParseCtx { ctx: self.ctx, exprs: &self.ctx.common_exprs },
|
||||||
|
self.path,
|
||||||
|
&self.ctx.i,
|
||||||
|
)
|
||||||
|
.await
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn strip_char(&mut self, tgt: char) -> bool {
|
pub fn strip_char(&mut self, tgt: char) -> bool {
|
||||||
@@ -101,7 +105,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
ParsTok::NS(ctx.ctx.i.i(name).await, Box::new(body))
|
ParsTok::NS(ctx.ctx.i.i(name).await, Box::new(body))
|
||||||
} else if ctx.strip_prefix("--[") {
|
} else if ctx.strip_prefix("--[") {
|
||||||
let Some((cmt, tail)) = ctx.tail.split_once("]--") else {
|
let Some((cmt, tail)) = ctx.tail.split_once("]--") else {
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.ctx.i.i("Unterminated block comment").await,
|
ctx.ctx.i.i("Unterminated block comment").await,
|
||||||
"This block comment has no ending ]--",
|
"This block comment has no ending ]--",
|
||||||
[SrcRange::new(start..start + 3, ctx.path)],
|
[SrcRange::new(start..start + 3, ctx.path)],
|
||||||
@@ -124,7 +128,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
ctx.trim_ws();
|
ctx.trim_ws();
|
||||||
while !ctx.strip_char(*rp) {
|
while !ctx.strip_char(*rp) {
|
||||||
if ctx.tail.is_empty() {
|
if ctx.tail.is_empty() {
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.ctx.i.i("unclosed paren").await,
|
ctx.ctx.i.i("unclosed paren").await,
|
||||||
format!("this {lp} has no matching {rp}"),
|
format!("this {lp} has no matching {rp}"),
|
||||||
[SrcRange::new(start..start + 1, ctx.path)],
|
[SrcRange::new(start..start + 1, ctx.path)],
|
||||||
@@ -139,26 +143,18 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
let mut errors = Vec::new();
|
let mut errors = Vec::new();
|
||||||
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
|
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
|
||||||
let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone());
|
let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone());
|
||||||
let temp_store = ctx.ctx.exprs.derive();
|
|
||||||
let ctx_lck = &Mutex::new(&mut *ctx);
|
let ctx_lck = &Mutex::new(&mut *ctx);
|
||||||
let errors_lck = &Mutex::new(&mut errors);
|
let errors_lck = &Mutex::new(&mut errors);
|
||||||
let temp_store_cb = temp_store.clone();
|
|
||||||
let lx = sys
|
let lx = sys
|
||||||
.lex(source, path, pos, |pos| {
|
.lex(source, path, pos, |pos| async move {
|
||||||
clone!(temp_store_cb);
|
|
||||||
async move {
|
|
||||||
let mut ctx_g = ctx_lck.lock().await;
|
let mut ctx_g = ctx_lck.lock().await;
|
||||||
match lex_once(&mut ctx_g.push(pos)).boxed_local().await {
|
match lex_once(&mut ctx_g.push(pos)).boxed_local().await {
|
||||||
Ok(t) => Some(api::SubLexed {
|
Ok(t) => Some(api::SubLexed { pos: t.sr.end(), tree: ctx_g.ser_subtree(t).await }),
|
||||||
pos: t.sr.end(),
|
|
||||||
tree: ctx_g.ser_subtree(t, temp_store_cb.clone()).await,
|
|
||||||
}),
|
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
errors_lck.lock().await.push(e);
|
errors_lck.lock().await.push(e);
|
||||||
None
|
None
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
match lx {
|
match lx {
|
||||||
@@ -168,14 +164,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
),
|
),
|
||||||
Ok(Some(lexed)) => {
|
Ok(Some(lexed)) => {
|
||||||
ctx.set_pos(lexed.pos);
|
ctx.set_pos(lexed.pos);
|
||||||
let lexed_tree = ctx.des_subtree(&lexed.expr, temp_store).await;
|
return Ok(ctx.des_subtree(&lexed.expr).await);
|
||||||
let stable_tree = recur(lexed_tree, &|tt, r| {
|
|
||||||
if let ParsTok::NewExpr(expr) = tt.tok {
|
|
||||||
return ParsTok::Handle(expr).at(tt.sr);
|
|
||||||
}
|
|
||||||
r(tt)
|
|
||||||
});
|
|
||||||
return Ok(stable_tree);
|
|
||||||
},
|
},
|
||||||
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
|
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
|
||||||
Some(errors) => return Err(errors),
|
Some(errors) => return Err(errors),
|
||||||
@@ -189,7 +178,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
} else if ctx.tail.starts_with(op_char) {
|
} else if ctx.tail.starts_with(op_char) {
|
||||||
ParsTok::Name(ctx.ctx.i.i(ctx.get_start_matches(op_char)).await)
|
ParsTok::Name(ctx.ctx.i.i(ctx.get_start_matches(op_char)).await)
|
||||||
} else {
|
} else {
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.ctx.i.i("Unrecognized character").await,
|
ctx.ctx.i.i("Unrecognized character").await,
|
||||||
"The following syntax is meaningless.",
|
"The following syntax is meaningless.",
|
||||||
[SrcRange::new(start..start + 1, ctx.path)],
|
[SrcRange::new(start..start + 1, ctx.path)],
|
||||||
@@ -199,12 +188,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
Ok(ParsTokTree { tok, sr: SrcRange::new(start..ctx.get_pos(), ctx.path) })
|
Ok(ParsTokTree { tok, sr: SrcRange::new(start..ctx.get_pos(), ctx.path) })
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn lex(
|
pub async fn lex(text: IStr, path: Sym, systems: &[System], ctx: &Ctx) -> OrcRes<Vec<ParsTokTree>> {
|
||||||
text: Tok<String>,
|
|
||||||
path: Sym,
|
|
||||||
systems: &[System],
|
|
||||||
ctx: &Ctx,
|
|
||||||
) -> OrcRes<Vec<ParsTokTree>> {
|
|
||||||
let mut sub_trees = Vec::new();
|
let mut sub_trees = Vec::new();
|
||||||
let mut ctx =
|
let mut ctx =
|
||||||
LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, path: &path, ctx };
|
LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, path: &path, ctx };
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
use futures::FutureExt;
|
use futures::future::join_all;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::{OrcRes, Reporter, mk_errv};
|
use orchid_base::error::{OrcRes, Reporter, mk_err};
|
||||||
use orchid_base::format::fmt;
|
use orchid_base::format::fmt;
|
||||||
use orchid_base::interner::{Interner, Tok};
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
@@ -47,23 +47,18 @@ pub trait HostParseCtx: ParseCtx {
|
|||||||
|
|
||||||
pub async fn parse_items(
|
pub async fn parse_items(
|
||||||
ctx: &impl HostParseCtx,
|
ctx: &impl HostParseCtx,
|
||||||
path: Substack<'_, Tok<String>>,
|
path: Substack<'_, IStr>,
|
||||||
items: ParsSnippet<'_>,
|
items: ParsSnippet<'_>,
|
||||||
) -> OrcRes<Vec<Item>> {
|
) -> OrcRes<Vec<Item>> {
|
||||||
let lines = line_items(ctx, items).await;
|
let lines = line_items(ctx, items).await;
|
||||||
let mut line_ok = Vec::new();
|
let line_res =
|
||||||
for Parsed { output: comments, tail } in lines {
|
join_all(lines.into_iter().map(|p| parse_item(ctx, path.clone(), p.output, p.tail))).await;
|
||||||
match parse_item(ctx, path.clone(), comments, tail).boxed_local().await {
|
Ok(line_res.into_iter().flat_map(|l| l.ok().into_iter().flatten()).collect())
|
||||||
Err(e) => ctx.rep().report(e),
|
|
||||||
Ok(l) => line_ok.extend(l),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(line_ok)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn parse_item(
|
pub async fn parse_item(
|
||||||
ctx: &impl HostParseCtx,
|
ctx: &impl HostParseCtx,
|
||||||
path: Substack<'_, Tok<String>>,
|
path: Substack<'_, IStr>,
|
||||||
comments: Vec<Comment>,
|
comments: Vec<Comment>,
|
||||||
item: ParsSnippet<'_>,
|
item: ParsSnippet<'_>,
|
||||||
) -> OrcRes<Vec<Item>> {
|
) -> OrcRes<Vec<Item>> {
|
||||||
@@ -72,7 +67,7 @@ pub async fn parse_item(
|
|||||||
n if *n == ctx.i().i("export").await => match try_pop_no_fluff(ctx, postdisc).await? {
|
n if *n == ctx.i().i("export").await => match try_pop_no_fluff(ctx, postdisc).await? {
|
||||||
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
|
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
|
||||||
parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await,
|
parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await,
|
||||||
Parsed { output, tail: _ } => Err(mk_errv(
|
Parsed { output, tail: _ } => Err(mk_err(
|
||||||
ctx.i().i("Malformed export").await,
|
ctx.i().i("Malformed export").await,
|
||||||
"`export` can either prefix other lines or list names inside ( )",
|
"`export` can either prefix other lines or list names inside ( )",
|
||||||
[output.sr()],
|
[output.sr()],
|
||||||
@@ -88,11 +83,10 @@ pub async fn parse_item(
|
|||||||
},
|
},
|
||||||
n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc).await,
|
n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc).await,
|
||||||
},
|
},
|
||||||
Some(_) => Err(mk_errv(
|
Some(_) =>
|
||||||
ctx.i().i("Expected a line type").await,
|
Err(mk_err(ctx.i().i("Expected a line type").await, "All lines must begin with a keyword", [
|
||||||
"All lines must begin with a keyword",
|
item.sr(),
|
||||||
[item.sr()],
|
])),
|
||||||
)),
|
|
||||||
None => unreachable!("These lines are filtered and aggregated in earlier stages"),
|
None => unreachable!("These lines are filtered and aggregated in earlier stages"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -108,10 +102,10 @@ pub async fn parse_import<'a>(
|
|||||||
|
|
||||||
pub async fn parse_exportable_item<'a>(
|
pub async fn parse_exportable_item<'a>(
|
||||||
ctx: &impl HostParseCtx,
|
ctx: &impl HostParseCtx,
|
||||||
path: Substack<'_, Tok<String>>,
|
path: Substack<'_, IStr>,
|
||||||
comments: Vec<Comment>,
|
comments: Vec<Comment>,
|
||||||
exported: bool,
|
exported: bool,
|
||||||
discr: Tok<String>,
|
discr: IStr,
|
||||||
tail: ParsSnippet<'a>,
|
tail: ParsSnippet<'a>,
|
||||||
) -> OrcRes<Vec<Item>> {
|
) -> OrcRes<Vec<Item>> {
|
||||||
let kind = if discr == ctx.i().i("mod").await {
|
let kind = if discr == ctx.i().i("mod").await {
|
||||||
@@ -126,7 +120,7 @@ pub async fn parse_exportable_item<'a>(
|
|||||||
.await;
|
.await;
|
||||||
} else {
|
} else {
|
||||||
let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
|
let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.i().i("Unrecognized line type").await,
|
ctx.i().i("Unrecognized line type").await,
|
||||||
format!("Line types are: mod, {ext_lines}"),
|
format!("Line types are: mod, {ext_lines}"),
|
||||||
[tail.prev().sr()],
|
[tail.prev().sr()],
|
||||||
@@ -137,13 +131,13 @@ pub async fn parse_exportable_item<'a>(
|
|||||||
|
|
||||||
pub async fn parse_module<'a>(
|
pub async fn parse_module<'a>(
|
||||||
ctx: &impl HostParseCtx,
|
ctx: &impl HostParseCtx,
|
||||||
path: Substack<'_, Tok<String>>,
|
path: Substack<'_, IStr>,
|
||||||
tail: ParsSnippet<'a>,
|
tail: ParsSnippet<'a>,
|
||||||
) -> OrcRes<(Tok<String>, ParsedModule)> {
|
) -> OrcRes<(IStr, ParsedModule)> {
|
||||||
let (name, tail) = match try_pop_no_fluff(ctx, tail).await? {
|
let (name, tail) = match try_pop_no_fluff(ctx, tail).await? {
|
||||||
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail),
|
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail),
|
||||||
Parsed { output, .. } => {
|
Parsed { output, .. } => {
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.i().i("Missing module name").await,
|
ctx.i().i("Missing module name").await,
|
||||||
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
||||||
[output.sr()],
|
[output.sr()],
|
||||||
@@ -153,7 +147,7 @@ pub async fn parse_module<'a>(
|
|||||||
let Parsed { output, tail: surplus } = try_pop_no_fluff(ctx, tail).await?;
|
let Parsed { output, tail: surplus } = try_pop_no_fluff(ctx, tail).await?;
|
||||||
expect_end(ctx, surplus).await?;
|
expect_end(ctx, surplus).await?;
|
||||||
let Some(body) = output.as_s(Paren::Round) else {
|
let Some(body) = output.as_s(Paren::Round) else {
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.i().i("Expected module body").await,
|
ctx.i().i("Expected module body").await,
|
||||||
format!("A ( block ) was expected, {} was found", fmt(output, ctx.i()).await),
|
format!("A ( block ) was expected, {} was found", fmt(output, ctx.i()).await),
|
||||||
[output.sr()],
|
[output.sr()],
|
||||||
|
|||||||
@@ -69,14 +69,14 @@ impl Format for Item {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub struct ParsedMember {
|
pub struct ParsedMember {
|
||||||
pub name: Tok<String>,
|
pub name: IStr,
|
||||||
pub exported: bool,
|
pub exported: bool,
|
||||||
pub kind: ParsedMemberKind,
|
pub kind: ParsedMemberKind,
|
||||||
}
|
}
|
||||||
impl ParsedMember {
|
impl ParsedMember {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn name(&self) -> Tok<String> { self.name.clone() }
|
pub fn name(&self) -> IStr { self.name.clone() }
|
||||||
pub fn new(exported: bool, name: Tok<String>, kind: impl Into<ParsedMemberKind>) -> Self {
|
pub fn new(exported: bool, name: IStr, kind: impl Into<ParsedMemberKind>) -> Self {
|
||||||
Self { exported, name, kind: kind.into() }
|
Self { exported, name, kind: kind.into() }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -90,14 +90,14 @@ impl Debug for ParsedMember {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) type ParsedExprCallback =
|
pub(crate) type ParsedExprCallback =
|
||||||
Rc<dyn for<'a> Fn(&'a [Tok<String>]) -> LocalBoxFuture<'a, Expr>>;
|
Rc<dyn for<'a> Fn(&'a [IStr]) -> LocalBoxFuture<'a, Expr>>;
|
||||||
|
|
||||||
pub struct ParsedExpr {
|
pub struct ParsedExpr {
|
||||||
pub(crate) debug: String,
|
pub(crate) debug: String,
|
||||||
pub(crate) callback: ParsedExprCallback,
|
pub(crate) callback: ParsedExprCallback,
|
||||||
}
|
}
|
||||||
impl ParsedExpr {
|
impl ParsedExpr {
|
||||||
pub async fn run(self, imported_names: &[Tok<String>]) -> Expr {
|
pub async fn run(self, imported_names: &[IStr]) -> Expr {
|
||||||
(self.callback)(imported_names).await
|
(self.callback)(imported_names).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -113,9 +113,10 @@ pub enum ParsedMemberKind {
|
|||||||
impl From<ParsedModule> for ParsedMemberKind {
|
impl From<ParsedModule> for ParsedMemberKind {
|
||||||
fn from(value: ParsedModule) -> Self { Self::Mod(value) }
|
fn from(value: ParsedModule) -> Self { Self::Mod(value) }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug, Default)]
|
||||||
pub struct ParsedModule {
|
pub struct ParsedModule {
|
||||||
pub exports: Vec<Tok<String>>,
|
pub exports: Vec<IStr>,
|
||||||
pub items: Vec<Item>,
|
pub items: Vec<Item>,
|
||||||
pub use_prelude: bool,
|
pub use_prelude: bool,
|
||||||
}
|
}
|
||||||
@@ -141,7 +142,7 @@ impl ParsedModule {
|
|||||||
(self.items.iter())
|
(self.items.iter())
|
||||||
.filter_map(|it| if let ItemKind::Import(i) = &it.kind { Some(i) } else { None })
|
.filter_map(|it| if let ItemKind::Import(i) = &it.kind { Some(i) } else { None })
|
||||||
}
|
}
|
||||||
pub fn default_item(self, name: Tok<String>, sr: SrcRange) -> Item {
|
pub fn default_item(self, name: IStr, sr: SrcRange) -> Item {
|
||||||
let mem = ParsedMember { exported: true, name, kind: ParsedMemberKind::Mod(self) };
|
let mem = ParsedMember { exported: true, name, kind: ParsedMemberKind::Mod(self) };
|
||||||
Item { comments: vec![], sr, kind: ItemKind::Member(mem) }
|
Item { comments: vec![], sr, kind: ItemKind::Member(mem) }
|
||||||
}
|
}
|
||||||
@@ -150,7 +151,7 @@ impl Tree for ParsedModule {
|
|||||||
type Ctx<'a> = ();
|
type Ctx<'a> = ();
|
||||||
async fn child(
|
async fn child(
|
||||||
&self,
|
&self,
|
||||||
key: Tok<String>,
|
key: IStr,
|
||||||
public_only: bool,
|
public_only: bool,
|
||||||
(): &mut Self::Ctx<'_>,
|
(): &mut Self::Ctx<'_>,
|
||||||
) -> ChildResult<'_, Self> {
|
) -> ChildResult<'_, Self> {
|
||||||
@@ -168,7 +169,7 @@ impl Tree for ParsedModule {
|
|||||||
}
|
}
|
||||||
ChildResult::Err(ChildErrorKind::Missing)
|
ChildResult::Err(ChildErrorKind::Missing)
|
||||||
}
|
}
|
||||||
fn children(&self, public_only: bool) -> HashSet<Tok<String>> {
|
fn children(&self, public_only: bool) -> HashSet<IStr> {
|
||||||
let mut public: HashSet<_> = self.exports.iter().cloned().collect();
|
let mut public: HashSet<_> = self.exports.iter().cloned().collect();
|
||||||
if !public_only {
|
if !public_only {
|
||||||
public.extend(
|
public.extend(
|
||||||
@@ -185,7 +186,7 @@ impl Tree for ParsedModule {
|
|||||||
impl Format for ParsedModule {
|
impl Format for ParsedModule {
|
||||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
let head_str = format!("export ::({})\n", self.exports.iter().join(", "));
|
let head_str = format!("export ::({})\n", self.exports.iter().join(", "));
|
||||||
Variants::default().sequence(self.items.len() + 1, "", "\n", "", None).units_own(
|
Variants::sequence(self.items.len() + 1, "\n", None).units(
|
||||||
[head_str.into()].into_iter().chain(join_all(self.items.iter().map(|i| i.print(c))).await),
|
[head_str.into()].into_iter().chain(join_all(self.items.iter().map(|i| i.print(c))).await),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -197,11 +198,11 @@ impl Format for ParsedModule {
|
|||||||
/// point to a module and rule_loc selects a macro rule within that module
|
/// point to a module and rule_loc selects a macro rule within that module
|
||||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||||
pub struct ConstPath {
|
pub struct ConstPath {
|
||||||
steps: Tok<Vec<Tok<String>>>,
|
steps: Tok<Vec<IStr>>,
|
||||||
}
|
}
|
||||||
impl ConstPath {
|
impl ConstPath {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps } }
|
pub fn to_const(steps: Tok<Vec<IStr>>) -> Self { Self { steps } }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn tt_to_api(exprs: &mut ExprStore, subtree: ParsTokTree) -> api::TokenTree {
|
pub async fn tt_to_api(exprs: &mut ExprStore, subtree: ParsTokTree) -> api::TokenTree {
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ use orchid_base::tree::ttv_from_api;
|
|||||||
use substack::Substack;
|
use substack::Substack;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
|
use crate::expr::ExprParseCtx;
|
||||||
use crate::expr_store::ExprStore;
|
use crate::expr_store::ExprStore;
|
||||||
use crate::parse::HostParseCtx;
|
use crate::parse::HostParseCtx;
|
||||||
use crate::parsed::{
|
use crate::parsed::{
|
||||||
@@ -22,7 +23,7 @@ pub struct Parser {
|
|||||||
pub(crate) system: System,
|
pub(crate) system: System,
|
||||||
pub(crate) idx: u16,
|
pub(crate) idx: u16,
|
||||||
}
|
}
|
||||||
type ModPath<'a> = Substack<'a, Tok<String>>;
|
type ModPath<'a> = Substack<'a, IStr>;
|
||||||
|
|
||||||
impl Parser {
|
impl Parser {
|
||||||
pub async fn parse(
|
pub async fn parse(
|
||||||
@@ -34,10 +35,11 @@ impl Parser {
|
|||||||
comments: Vec<Comment>,
|
comments: Vec<Comment>,
|
||||||
callback: &mut impl AsyncFnMut(ModPath<'_>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
|
callback: &mut impl AsyncFnMut(ModPath<'_>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
|
||||||
) -> OrcRes<Vec<Item>> {
|
) -> OrcRes<Vec<Item>> {
|
||||||
let mut temp_store = self.system.ctx().exprs.derive();
|
|
||||||
let src_path = line.first().expect("cannot be empty").sr.path();
|
let src_path = line.first().expect("cannot be empty").sr.path();
|
||||||
let line =
|
let line = join_all(
|
||||||
join_all((line.into_iter()).map(|t| async { tt_to_api(&mut temp_store.clone(), t).await }))
|
(line.into_iter())
|
||||||
|
.map(|t| async { tt_to_api(&mut self.system.ext().exprs().clone(), t).await }),
|
||||||
|
)
|
||||||
.await;
|
.await;
|
||||||
let mod_path = ctx.src_path().suffix(path.unreverse(), self.system.i()).await;
|
let mod_path = ctx.src_path().suffix(path.unreverse(), self.system.i()).await;
|
||||||
let comments = comments.iter().map(Comment::to_api).collect_vec();
|
let comments = comments.iter().map(Comment::to_api).collect_vec();
|
||||||
@@ -51,15 +53,18 @@ impl Parser {
|
|||||||
line,
|
line,
|
||||||
};
|
};
|
||||||
match self.system.reqnot().request(req).await {
|
match self.system.reqnot().request(req).await {
|
||||||
Ok(parsed_v) =>
|
Ok(parsed_v) => {
|
||||||
|
let mut ext_exprs = self.system.ext().exprs().clone();
|
||||||
conv(parsed_v, path, callback, &mut ConvCtx {
|
conv(parsed_v, path, callback, &mut ConvCtx {
|
||||||
i: self.system.i(),
|
i: self.system.i(),
|
||||||
mod_path: &mod_path,
|
mod_path: &mod_path,
|
||||||
ext_exprs: &mut temp_store,
|
ext_exprs: &mut ext_exprs,
|
||||||
|
pctx: &mut ExprParseCtx { ctx: self.system.ctx(), exprs: self.system.ext().exprs() },
|
||||||
src_path: &src_path,
|
src_path: &src_path,
|
||||||
sys: &self.system,
|
sys: &self.system,
|
||||||
})
|
})
|
||||||
.await,
|
.await
|
||||||
|
},
|
||||||
Err(e) => Err(OrcErrv::from_api(&e, &self.system.ctx().i).await),
|
Err(e) => Err(OrcErrv::from_api(&e, &self.system.ctx().i).await),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -71,11 +76,12 @@ struct ConvCtx<'a> {
|
|||||||
src_path: &'a Sym,
|
src_path: &'a Sym,
|
||||||
i: &'a Interner,
|
i: &'a Interner,
|
||||||
ext_exprs: &'a mut ExprStore,
|
ext_exprs: &'a mut ExprStore,
|
||||||
|
pctx: &'a mut ExprParseCtx<'a>,
|
||||||
}
|
}
|
||||||
async fn conv(
|
async fn conv(
|
||||||
parsed_v: Vec<api::ParsedLine>,
|
parsed_v: Vec<api::ParsedLine>,
|
||||||
module: Substack<'_, Tok<String>>,
|
module: Substack<'_, IStr>,
|
||||||
callback: &'_ mut impl AsyncFnMut(Substack<'_, Tok<String>>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
|
callback: &'_ mut impl AsyncFnMut(Substack<'_, IStr>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
|
||||||
ctx: &mut ConvCtx<'_>,
|
ctx: &mut ConvCtx<'_>,
|
||||||
) -> OrcRes<Vec<Item>> {
|
) -> OrcRes<Vec<Item>> {
|
||||||
let mut items = Vec::new();
|
let mut items = Vec::new();
|
||||||
@@ -84,21 +90,19 @@ async fn conv(
|
|||||||
api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) =>
|
api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) =>
|
||||||
(name, exported, kind),
|
(name, exported, kind),
|
||||||
api::ParsedLineKind::Recursive(rec) => {
|
api::ParsedLineKind::Recursive(rec) => {
|
||||||
let tokens =
|
let tokens = ttv_from_api(rec, ctx.ext_exprs, ctx.pctx, ctx.src_path, ctx.i).await;
|
||||||
ttv_from_api(rec, ctx.ext_exprs, &mut ctx.sys.ctx().clone(), ctx.src_path, ctx.i).await;
|
|
||||||
items.extend(callback(module.clone(), tokens).await?);
|
items.extend(callback(module.clone(), tokens).await?);
|
||||||
continue;
|
continue;
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
let name = ctx.i.ex(name).await;
|
let name = ctx.i.ex(name).await;
|
||||||
let mem_path = module.push(name.clone());
|
|
||||||
let mkind = match kind {
|
let mkind = match kind {
|
||||||
api::ParsedMemberKind::Module { lines, use_prelude } => {
|
api::ParsedMemberKind::Module { lines, use_prelude } => {
|
||||||
let items = conv(lines, mem_path, callback, ctx).boxed_local().await?;
|
let items = conv(lines, module.push(name.clone()), callback, ctx).boxed_local().await?;
|
||||||
ParsedMemberKind::Mod(ParsedModule::new(use_prelude, items))
|
ParsedMemberKind::Mod(ParsedModule::new(use_prelude, items))
|
||||||
},
|
},
|
||||||
api::ParsedMemberKind::Constant(cid) => {
|
api::ParsedMemberKind::Constant(cid) => {
|
||||||
ctx.sys.0.const_paths.insert(cid, ctx.mod_path.suffix(mem_path.unreverse(), ctx.i).await);
|
ctx.sys.0.const_paths.insert(cid, ctx.mod_path.suffix(module.unreverse(), ctx.i).await);
|
||||||
ParsedMemberKind::Const(cid, ctx.sys.clone())
|
ParsedMemberKind::Const(cid, ctx.sys.clone())
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -3,14 +3,14 @@ use std::fmt;
|
|||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use std::rc::{Rc, Weak};
|
use std::rc::{Rc, Weak};
|
||||||
|
|
||||||
|
use async_lock::RwLock;
|
||||||
use derive_destructure::destructure;
|
use derive_destructure::destructure;
|
||||||
use futures::future::join_all;
|
use futures::future::join_all;
|
||||||
use futures_locks::RwLock;
|
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use memo_map::MemoMap;
|
use memo_map::MemoMap;
|
||||||
use orchid_base::char_filter::char_filter_match;
|
use orchid_base::char_filter::char_filter_match;
|
||||||
use orchid_base::error::{OrcRes, mk_errv_floating};
|
use orchid_base::error::{OrcRes, mk_err_floating};
|
||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format};
|
use orchid_base::format::{FmtCtx, FmtUnit, Format};
|
||||||
use orchid_base::interner::{Interner, Tok};
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::iter_utils::IteratorPrint;
|
use orchid_base::iter_utils::IteratorPrint;
|
||||||
@@ -35,7 +35,7 @@ pub(crate) struct SystemInstData {
|
|||||||
decl_id: api::SysDeclId,
|
decl_id: api::SysDeclId,
|
||||||
lex_filter: api::CharFilter,
|
lex_filter: api::CharFilter,
|
||||||
id: api::SysId,
|
id: api::SysId,
|
||||||
line_types: Vec<Tok<String>>,
|
line_types: Vec<IStr>,
|
||||||
prelude: Vec<Sym>,
|
prelude: Vec<Sym>,
|
||||||
owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
|
owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
|
||||||
pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>,
|
pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>,
|
||||||
@@ -57,10 +57,6 @@ impl fmt::Debug for SystemInstData {
|
|||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct System(pub(crate) Rc<SystemInstData>);
|
pub struct System(pub(crate) Rc<SystemInstData>);
|
||||||
impl System {
|
impl System {
|
||||||
#[must_use]
|
|
||||||
pub async fn atoms(&self) -> impl std::ops::Deref<Target = HashMap<api::AtomId, WeakAtomHand>> {
|
|
||||||
self.0.owned_atoms.read().await
|
|
||||||
}
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn id(&self) -> api::SysId { self.0.id }
|
pub fn id(&self) -> api::SysId { self.0.id }
|
||||||
#[must_use]
|
#[must_use]
|
||||||
@@ -92,7 +88,7 @@ impl System {
|
|||||||
/// [Self::can_lex] was called and returned true.
|
/// [Self::can_lex] was called and returned true.
|
||||||
pub async fn lex<F: Future<Output = Option<api::SubLexed>>>(
|
pub async fn lex<F: Future<Output = Option<api::SubLexed>>>(
|
||||||
&self,
|
&self,
|
||||||
source: Tok<String>,
|
source: IStr,
|
||||||
src: Sym,
|
src: Sym,
|
||||||
pos: u32,
|
pos: u32,
|
||||||
r: impl FnMut(u32) -> F,
|
r: impl FnMut(u32) -> F,
|
||||||
@@ -100,12 +96,12 @@ impl System {
|
|||||||
self.0.ext.lex_req(source, src, pos, self.id(), r).await
|
self.0.ext.lex_req(source, src, pos, self.id(), r).await
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn get_parser(&self, ltyp: Tok<String>) -> Option<Parser> {
|
pub fn get_parser(&self, ltyp: IStr) -> Option<Parser> {
|
||||||
(self.0.line_types.iter().enumerate())
|
(self.0.line_types.iter().enumerate())
|
||||||
.find(|(_, txt)| *txt == <yp)
|
.find(|(_, txt)| *txt == <yp)
|
||||||
.map(|(idx, _)| Parser { idx: idx as u16, system: self.clone() })
|
.map(|(idx, _)| Parser { idx: idx as u16, system: self.clone() })
|
||||||
}
|
}
|
||||||
pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() }
|
pub fn line_types(&self) -> impl Iterator<Item = &IStr> + '_ { self.0.line_types.iter() }
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub async fn request(&self, req: Vec<u8>) -> Vec<u8> {
|
pub async fn request(&self, req: Vec<u8>) -> Vec<u8> {
|
||||||
@@ -122,22 +118,19 @@ impl System {
|
|||||||
owned_g.insert(id, new.downgrade());
|
owned_g.insert(id, new.downgrade());
|
||||||
new
|
new
|
||||||
}
|
}
|
||||||
pub(crate) fn drop_atom(&self, dropped_atom_id: api::AtomId) {
|
pub(crate) fn drop_atom(&self, drop: api::AtomId) {
|
||||||
let this = self.0.clone();
|
let this = self.0.clone();
|
||||||
(self.0.ctx.spawn)(Box::pin(async move {
|
(self.0.ctx.spawn)(Box::pin(async move {
|
||||||
this.ext.reqnot().request(api::AtomDrop(this.id, dropped_atom_id)).await;
|
this.owned_atoms.write().await.remove(&drop);
|
||||||
this.owned_atoms.write().await.remove(&dropped_atom_id);
|
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn downgrade(&self) -> WeakSystem {
|
pub fn downgrade(&self) -> WeakSystem { WeakSystem(Rc::downgrade(&self.0)) }
|
||||||
WeakSystem(Rc::downgrade(&self.0), self.0.decl_id, self.ext().downgrade())
|
|
||||||
}
|
|
||||||
/// Implementation of [api::ResolveNames]
|
/// Implementation of [api::ResolveNames]
|
||||||
pub(crate) async fn name_resolver(
|
pub(crate) async fn name_resolver(
|
||||||
&self,
|
&self,
|
||||||
orig: api::ParsedConstId,
|
orig: api::ParsedConstId,
|
||||||
) -> impl AsyncFnMut(&[Tok<String>]) -> OrcRes<VName> + use<> {
|
) -> impl AsyncFnMut(&[IStr]) -> OrcRes<VName> + use<> {
|
||||||
let root = self.0.ctx.root.read().await.upgrade().expect("find_names when root not in context");
|
let root = self.0.ctx.root.read().await.upgrade().expect("find_names when root not in context");
|
||||||
let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone();
|
let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone();
|
||||||
let ctx = self.0.ctx.clone();
|
let ctx = self.0.ctx.clone();
|
||||||
@@ -154,7 +147,7 @@ impl System {
|
|||||||
match cmod.imports.get(selector) {
|
match cmod.imports.get(selector) {
|
||||||
Some(Ok(dest)) => return Ok(dest.target.to_vname().suffix(tail.iter().cloned())),
|
Some(Ok(dest)) => return Ok(dest.target.to_vname().suffix(tail.iter().cloned())),
|
||||||
Some(Err(dests)) =>
|
Some(Err(dests)) =>
|
||||||
return Err(mk_errv_floating(
|
return Err(mk_err_floating(
|
||||||
ctx.i.i("Ambiguous name").await,
|
ctx.i.i("Ambiguous name").await,
|
||||||
format!(
|
format!(
|
||||||
"{selector} could refer to {}",
|
"{selector} could refer to {}",
|
||||||
@@ -163,13 +156,10 @@ impl System {
|
|||||||
)),
|
)),
|
||||||
None => (),
|
None => (),
|
||||||
}
|
}
|
||||||
if root_data.root.members.get(selector).is_some() {
|
|
||||||
return Ok(VName::new(rel.iter().cloned()).expect("split_first was called above"));
|
|
||||||
}
|
|
||||||
if tail.is_empty() {
|
if tail.is_empty() {
|
||||||
return Ok(VPath::new(cwd.iter().cloned()).name_with_suffix(selector.clone()));
|
return Ok(VPath::new(cwd.iter().cloned()).name_with_suffix(selector.clone()));
|
||||||
}
|
}
|
||||||
Err(mk_errv_floating(
|
Err(mk_err_floating(
|
||||||
ctx.i.i("Invalid name").await,
|
ctx.i.i("Invalid name").await,
|
||||||
format!("{selector} doesn't refer to a module"),
|
format!("{selector} doesn't refer to a module"),
|
||||||
))
|
))
|
||||||
@@ -184,14 +174,10 @@ impl Format for System {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct WeakSystem(Weak<SystemInstData>, api::SysDeclId, WeakExtension);
|
pub struct WeakSystem(Weak<SystemInstData>);
|
||||||
impl WeakSystem {
|
impl WeakSystem {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) }
|
pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) }
|
||||||
pub fn ext(&self) -> Option<Extension> { self.2.upgrade() }
|
|
||||||
pub fn ctor(&self) -> Option<SystemCtor> {
|
|
||||||
self.ext()?.system_ctors().find(|ctor| ctor.decl.id == self.1).cloned()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
|
|||||||
@@ -4,16 +4,15 @@ use std::cell::RefCell;
|
|||||||
use std::rc::{Rc, Weak};
|
use std::rc::{Rc, Weak};
|
||||||
use std::slice;
|
use std::slice;
|
||||||
|
|
||||||
|
use async_lock::RwLock;
|
||||||
use async_once_cell::OnceCell;
|
use async_once_cell::OnceCell;
|
||||||
use derive_destructure::destructure;
|
|
||||||
use futures::{FutureExt, StreamExt, stream};
|
use futures::{FutureExt, StreamExt, stream};
|
||||||
use futures_locks::RwLock;
|
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use hashbrown::hash_map::Entry;
|
use hashbrown::hash_map::Entry;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use memo_map::MemoMap;
|
use memo_map::MemoMap;
|
||||||
use orchid_base::clone;
|
use orchid_base::clone;
|
||||||
use orchid_base::error::{OrcRes, Reporter, mk_errv};
|
use orchid_base::error::{OrcRes, Reporter, mk_err};
|
||||||
use orchid_base::interner::Tok;
|
use orchid_base::interner::Tok;
|
||||||
use orchid_base::location::{CodeGenInfo, Pos};
|
use orchid_base::location::{CodeGenInfo, Pos};
|
||||||
use orchid_base::name::{NameLike, Sym, VPath};
|
use orchid_base::name::{NameLike, Sym, VPath};
|
||||||
@@ -22,7 +21,7 @@ use orchid_base::reqnot::Requester;
|
|||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::dealias::{ChildErrorKind, Tree, absolute_path, resolv_glob, walk};
|
use crate::dealias::{ChildErrorKind, Tree, absolute_path, resolv_glob, walk};
|
||||||
use crate::expr::{Expr, PathSetBuilder};
|
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
|
||||||
use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule};
|
use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule};
|
||||||
use crate::system::System;
|
use crate::system::System;
|
||||||
|
|
||||||
@@ -89,8 +88,9 @@ impl Root {
|
|||||||
*this.ctx.root.write().await = new.downgrade();
|
*this.ctx.root.write().await = new.downgrade();
|
||||||
for (path, (sys_id, pc_id)) in deferred_consts {
|
for (path, (sys_id, pc_id)) in deferred_consts {
|
||||||
let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
|
let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
|
||||||
let api_expr = sys.reqnot().request(api::FetchParsedConst(sys.id(), pc_id)).await;
|
let api_expr = sys.reqnot().request(api::FetchParsedConst { id: pc_id, sys: sys.id() }).await;
|
||||||
let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), this.ctx.clone()).await;
|
let mut xp_ctx = ExprParseCtx { ctx: &this.ctx, exprs: sys.ext().exprs() };
|
||||||
|
let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), &mut xp_ctx).await;
|
||||||
new.0.write().await.consts.insert(path, expr);
|
new.0.write().await.consts.insert(path, expr);
|
||||||
}
|
}
|
||||||
new
|
new
|
||||||
@@ -109,7 +109,7 @@ impl Root {
|
|||||||
return Ok(val.clone());
|
return Ok(val.clone());
|
||||||
}
|
}
|
||||||
match module {
|
match module {
|
||||||
Ok(_) => Err(mk_errv(
|
Ok(_) => Err(mk_err(
|
||||||
ctx.i.i("module used as constant").await,
|
ctx.i.i("module used as constant").await,
|
||||||
format!("{name} is a module, not a constant"),
|
format!("{name} is a module, not a constant"),
|
||||||
[pos],
|
[pos],
|
||||||
@@ -117,7 +117,7 @@ impl Root {
|
|||||||
Err(e) => match e.kind {
|
Err(e) => match e.kind {
|
||||||
ChildErrorKind::Private => panic!("public_only is false"),
|
ChildErrorKind::Private => panic!("public_only is false"),
|
||||||
ChildErrorKind::Constant => panic!("Tree refers to constant not in table"),
|
ChildErrorKind::Constant => panic!("Tree refers to constant not in table"),
|
||||||
ChildErrorKind::Missing => Err(mk_errv(
|
ChildErrorKind::Missing => Err(mk_err(
|
||||||
ctx.i.i("Constant does not exist").await,
|
ctx.i.i("Constant does not exist").await,
|
||||||
format!("{name} does not refer to a constant"),
|
format!("{name} does not refer to a constant"),
|
||||||
[pos],
|
[pos],
|
||||||
@@ -144,11 +144,11 @@ impl Default for WeakRoot {
|
|||||||
pub struct TreeFromApiCtx<'a> {
|
pub struct TreeFromApiCtx<'a> {
|
||||||
pub sys: &'a System,
|
pub sys: &'a System,
|
||||||
pub consts: &'a MemoMap<Sym, Expr>,
|
pub consts: &'a MemoMap<Sym, Expr>,
|
||||||
pub path: Tok<Vec<Tok<String>>>,
|
pub path: Tok<Vec<IStr>>,
|
||||||
}
|
}
|
||||||
impl<'a> TreeFromApiCtx<'a> {
|
impl<'a> TreeFromApiCtx<'a> {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub async fn push<'c>(&'c self, name: Tok<String>) -> TreeFromApiCtx<'c> {
|
pub async fn push<'c>(&'c self, name: IStr) -> TreeFromApiCtx<'c> {
|
||||||
let path = self.sys.ctx().i.i(&self.path.iter().cloned().chain([name]).collect_vec()).await;
|
let path = self.sys.ctx().i.i(&self.path.iter().cloned().chain([name]).collect_vec()).await;
|
||||||
TreeFromApiCtx { path, consts: self.consts, sys: self.sys }
|
TreeFromApiCtx { path, consts: self.consts, sys: self.sys }
|
||||||
}
|
}
|
||||||
@@ -162,8 +162,8 @@ pub struct ResolvedImport {
|
|||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct Module {
|
pub struct Module {
|
||||||
pub imports: HashMap<Tok<String>, Result<ResolvedImport, Vec<ResolvedImport>>>,
|
pub imports: HashMap<IStr, Result<ResolvedImport, Vec<ResolvedImport>>>,
|
||||||
pub members: HashMap<Tok<String>, Rc<Member>>,
|
pub members: HashMap<IStr, Rc<Member>>,
|
||||||
}
|
}
|
||||||
impl Module {
|
impl Module {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
@@ -177,7 +177,8 @@ impl Module {
|
|||||||
api::MemberKind::Lazy(id) =>
|
api::MemberKind::Lazy(id) =>
|
||||||
(Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None),
|
(Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None),
|
||||||
api::MemberKind::Const(val) => {
|
api::MemberKind::Const(val) => {
|
||||||
let expr = Expr::from_api(&val, PathSetBuilder::new(), ctx.sys.ctx().clone()).await;
|
let mut expr_ctx = ExprParseCtx { ctx: ctx.sys.ctx(), exprs: ctx.sys.ext().exprs() };
|
||||||
|
let expr = Expr::from_api(&val, PathSetBuilder::new(), &mut expr_ctx).await;
|
||||||
ctx.consts.insert(name.clone(), expr);
|
ctx.consts.insert(name.clone(), expr);
|
||||||
(None, Some(MemberKind::Const))
|
(None, Some(MemberKind::Const))
|
||||||
},
|
},
|
||||||
@@ -267,7 +268,7 @@ impl Module {
|
|||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
for item in values {
|
for item in values {
|
||||||
ctx.rep.report(mk_errv(
|
ctx.rep.report(mk_err(
|
||||||
conflicting_imports_msg.clone(),
|
conflicting_imports_msg.clone(),
|
||||||
format!("{key} is imported multiple times from different modules"),
|
format!("{key} is imported multiple times from different modules"),
|
||||||
[item.sr.pos()],
|
[item.sr.pos()],
|
||||||
@@ -297,7 +298,7 @@ impl Module {
|
|||||||
let Ok(import) = value else { continue };
|
let Ok(import) = value else { continue };
|
||||||
if import.target.strip_prefix(&path[..]).is_some_and(|t| t.starts_with(slice::from_ref(key)))
|
if import.target.strip_prefix(&path[..]).is_some_and(|t| t.starts_with(slice::from_ref(key)))
|
||||||
{
|
{
|
||||||
ctx.rep.report(mk_errv(
|
ctx.rep.report(mk_err(
|
||||||
self_referential_msg.clone(),
|
self_referential_msg.clone(),
|
||||||
format!("import {} points to itself or a path within itself", &import.target),
|
format!("import {} points to itself or a path within itself", &import.target),
|
||||||
[import.pos.clone()],
|
[import.pos.clone()],
|
||||||
@@ -395,7 +396,7 @@ impl Tree for Module {
|
|||||||
type Ctx<'a> = (Ctx, &'a MemoMap<Sym, Expr>);
|
type Ctx<'a> = (Ctx, &'a MemoMap<Sym, Expr>);
|
||||||
async fn child(
|
async fn child(
|
||||||
&self,
|
&self,
|
||||||
key: Tok<String>,
|
key: IStr,
|
||||||
public_only: bool,
|
public_only: bool,
|
||||||
(ctx, consts): &mut Self::Ctx<'_>,
|
(ctx, consts): &mut Self::Ctx<'_>,
|
||||||
) -> crate::dealias::ChildResult<'_, Self> {
|
) -> crate::dealias::ChildResult<'_, Self> {
|
||||||
@@ -410,7 +411,7 @@ impl Tree for Module {
|
|||||||
MemberKind::Const => Err(ChildErrorKind::Constant),
|
MemberKind::Const => Err(ChildErrorKind::Constant),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn children(&self, public_only: bool) -> hashbrown::HashSet<Tok<String>> {
|
fn children(&self, public_only: bool) -> hashbrown::HashSet<IStr> {
|
||||||
self.members.iter().filter(|(_, v)| !public_only || v.public).map(|(k, _)| k.clone()).collect()
|
self.members.iter().filter(|(_, v)| !public_only || v.public).map(|(k, _)| k.clone()).collect()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -449,7 +450,6 @@ impl MemberKind {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(destructure)]
|
|
||||||
pub struct LazyMemberHandle {
|
pub struct LazyMemberHandle {
|
||||||
id: api::TreeId,
|
id: api::TreeId,
|
||||||
sys: api::SysId,
|
sys: api::SysId,
|
||||||
@@ -457,25 +457,19 @@ pub struct LazyMemberHandle {
|
|||||||
}
|
}
|
||||||
impl LazyMemberHandle {
|
impl LazyMemberHandle {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub async fn run(mut self, ctx: Ctx, consts: &MemoMap<Sym, Expr>) -> MemberKind {
|
pub async fn run(self, ctx: Ctx, consts: &MemoMap<Sym, Expr>) -> MemberKind {
|
||||||
let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member");
|
let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member");
|
||||||
match sys.get_tree(self.id).await {
|
match sys.get_tree(self.id).await {
|
||||||
api::MemberKind::Const(c) => {
|
api::MemberKind::Const(c) => {
|
||||||
let expr = Expr::from_api(&c, PathSetBuilder::new(), ctx.clone()).await;
|
let mut pctx = ExprParseCtx { ctx: &ctx, exprs: sys.ext().exprs() };
|
||||||
let (.., path) = self.destructure();
|
let expr = Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await;
|
||||||
consts.insert(path, expr);
|
consts.insert(self.path, expr);
|
||||||
MemberKind::Const
|
MemberKind::Const
|
||||||
},
|
},
|
||||||
api::MemberKind::Module(m) => {
|
api::MemberKind::Module(m) => MemberKind::Module(
|
||||||
let (.., path) = self.destructure();
|
Module::from_api(m, &mut TreeFromApiCtx { sys: &sys, consts, path: self.path.tok() }).await,
|
||||||
MemberKind::Module(
|
),
|
||||||
Module::from_api(m, &mut TreeFromApiCtx { sys: &sys, consts, path: path.tok() }).await,
|
api::MemberKind::Lazy(id) => Self { id, ..self }.run(ctx, consts).boxed_local().await,
|
||||||
)
|
|
||||||
},
|
|
||||||
api::MemberKind::Lazy(id) => {
|
|
||||||
self.id = id;
|
|
||||||
self.run(ctx, consts).boxed_local().await
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#[must_use]
|
#[must_use]
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ version = "0.1.0"
|
|||||||
edition = "2024"
|
edition = "2024"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
|
|
||||||
async-once-cell = "0.5.4"
|
async-once-cell = "0.5.4"
|
||||||
futures = { version = "0.3.31", features = ["std"], default-features = false }
|
futures = { version = "0.3.31", features = ["std"], default-features = false }
|
||||||
hashbrown = "0.16.0"
|
hashbrown = "0.16.0"
|
||||||
@@ -19,9 +18,7 @@ orchid-extension = { version = "0.1.0", path = "../orchid-extension", features =
|
|||||||
"tokio",
|
"tokio",
|
||||||
] }
|
] }
|
||||||
ordered-float = "5.0.0"
|
ordered-float = "5.0.0"
|
||||||
pastey = "0.1.1"
|
rust_decimal = "1.37.2"
|
||||||
rust_decimal = "1.38.0"
|
|
||||||
subslice-offset = "0.1.1"
|
|
||||||
substack = "1.1.1"
|
substack = "1.1.1"
|
||||||
tokio = { version = "1.47.1", features = ["full"] }
|
tokio = { version = "1.47.1", features = ["full"] }
|
||||||
|
|
||||||
|
|||||||
@@ -2,12 +2,8 @@ mod macros;
|
|||||||
mod std;
|
mod std;
|
||||||
|
|
||||||
pub use std::number::num_atom::{Float, HomoArray, Int, Num};
|
pub use std::number::num_atom::{Float, HomoArray, Int, Num};
|
||||||
pub use std::option::OrcOpt;
|
|
||||||
pub use std::reflection::sym_atom::{SymAtom, sym_expr};
|
|
||||||
pub use std::std_system::StdSystem;
|
pub use std::std_system::StdSystem;
|
||||||
pub use std::string::str_atom::OrcString;
|
pub use std::string::str_atom::OrcString;
|
||||||
pub use std::tuple::{HomoTpl, Tpl, Tuple, UntypedTuple};
|
|
||||||
|
|
||||||
pub use macros::macro_system::MacroSystem;
|
pub use macros::macro_system::MacroSystem;
|
||||||
pub use macros::mactree::{MacTok, MacTree};
|
pub use macros::mactree::{MacTok, MacTree};
|
||||||
use orchid_api as api;
|
|
||||||
|
|||||||
@@ -1,16 +1,13 @@
|
|||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
|
|
||||||
use never::Never;
|
use never::Never;
|
||||||
use orchid_base::format::fmt;
|
use orchid_extension::atom::{Atomic, TypAtom};
|
||||||
use orchid_extension::atom::{Atomic, TAtom};
|
|
||||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
|
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
|
||||||
use orchid_extension::context::i;
|
use orchid_extension::conv::{ToExpr, TryFromExpr};
|
||||||
use orchid_extension::conv::ToExpr;
|
|
||||||
use orchid_extension::coroutine_exec::exec;
|
|
||||||
use orchid_extension::expr::Expr;
|
use orchid_extension::expr::Expr;
|
||||||
use orchid_extension::gen_expr::GExpr;
|
use orchid_extension::gen_expr::GExpr;
|
||||||
|
|
||||||
use crate::macros::mactree::{MacTok, MacTree};
|
use crate::macros::mactree::{MacTok, MacTree, map_mactree};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct InstantiateTplCall {
|
pub struct InstantiateTplCall {
|
||||||
@@ -27,33 +24,26 @@ impl OwnedAtom for InstantiateTplCall {
|
|||||||
type Refs = Never;
|
type Refs = Never;
|
||||||
// Technically must be supported but shouldn't actually ever be called
|
// Technically must be supported but shouldn't actually ever be called
|
||||||
async fn call_ref(&self, arg: Expr) -> GExpr {
|
async fn call_ref(&self, arg: Expr) -> GExpr {
|
||||||
if !self.argv.is_empty() {
|
|
||||||
eprintln!(
|
eprintln!(
|
||||||
"Copying partially applied instantiate_tpl call. This is an internal value.\
|
"Copying partially applied instantiate_tpl call. This is an internal value.\
|
||||||
\nIt should be fully consumed within generated code."
|
\nIt should be fully consumed within generated code."
|
||||||
);
|
);
|
||||||
}
|
|
||||||
self.clone().call(arg).await
|
self.clone().call(arg).await
|
||||||
}
|
}
|
||||||
async fn call(mut self, arg: Expr) -> GExpr {
|
async fn call(mut self, arg: Expr) -> GExpr {
|
||||||
exec(async move |mut h| {
|
match TypAtom::<MacTree>::try_from_expr(arg).await {
|
||||||
match h.exec::<TAtom<MacTree>>(arg.clone()).await {
|
Err(e) => return Err::<Never, _>(e).to_expr().await,
|
||||||
Err(_) => panic!("Expected a macro param, found {}", fmt(&arg, &i()).await),
|
Ok(t) => self.argv.push(own(t).await),
|
||||||
Ok(t) => self.argv.push(own(&t).await),
|
|
||||||
};
|
};
|
||||||
if self.argv.len() < self.argc {
|
if self.argv.len() < self.argc {
|
||||||
return self.to_gen().await;
|
return self.to_expr().await;
|
||||||
}
|
}
|
||||||
let mut args = self.argv.into_iter();
|
let mut args = self.argv.into_iter();
|
||||||
let ret = self.tpl.map(&mut false, &mut |mt| match mt.tok() {
|
let ret = map_mactree(&self.tpl, &mut false, &mut |mt| match mt.tok() {
|
||||||
MacTok::Slot => Some(args.next().expect("Not enough arguments to fill all slots")),
|
MacTok::Slot => Some(args.next().expect("Not enough arguments to fill all slots")),
|
||||||
_ => None,
|
_ => None,
|
||||||
});
|
});
|
||||||
assert!(args.next().is_none(), "Too many arguments for all slots");
|
assert!(args.next().is_none(), "Too many arguments for all slots");
|
||||||
ret.to_gen().await
|
ret.to_expr().await
|
||||||
})
|
|
||||||
.await
|
|
||||||
.to_gen()
|
|
||||||
.await
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,14 +10,10 @@ use orchid_base::parse::{
|
|||||||
};
|
};
|
||||||
use orchid_base::sym;
|
use orchid_base::sym;
|
||||||
use orchid_base::tree::Paren;
|
use orchid_base::tree::Paren;
|
||||||
use orchid_extension::atom::TAtom;
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::conv::TryFromExpr;
|
|
||||||
use orchid_extension::gen_expr::{atom, call, sym_ref};
|
use orchid_extension::gen_expr::{atom, call, sym_ref};
|
||||||
use orchid_extension::parser::{ConstCtx, PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, Parser};
|
use orchid_extension::parser::{ConstCtx, PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, Parser};
|
||||||
|
|
||||||
use crate::macros::mactree::{MacTok, MacTree, MacTreeSeq};
|
use crate::macros::mactree::{MacTok, MacTree, glossary_v, map_mactree_v};
|
||||||
use crate::macros::ph_lexer::PhAtom;
|
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct LetLine;
|
pub struct LetLine;
|
||||||
@@ -41,18 +37,21 @@ impl Parser for LetLine {
|
|||||||
let aliased = parse_tokv(tail, &ctx).await;
|
let aliased = parse_tokv(tail, &ctx).await;
|
||||||
Ok(vec![ParsedLine::cnst(&line.sr(), &comments, exported, name, async move |ctx| {
|
Ok(vec![ParsedLine::cnst(&line.sr(), &comments, exported, name, async move |ctx| {
|
||||||
let rep = Reporter::new();
|
let rep = Reporter::new();
|
||||||
let macro_input =
|
let dealiased = dealias_mac_v(aliased, &ctx, &rep).await;
|
||||||
MacTok::S(Paren::Round, dealias_mac_v(&aliased, &ctx, &rep).await).at(sr.pos());
|
let macro_input = MacTok::S(Paren::Round, dealiased).at(sr.pos());
|
||||||
if let Some(e) = rep.errv() {
|
if let Some(e) = rep.res() {
|
||||||
return Err(e);
|
return Err(e);
|
||||||
}
|
}
|
||||||
Ok(call(sym_ref(sym!(macros::resolve; i())), [atom(macro_input)]))
|
Ok(call([
|
||||||
|
sym_ref(sym!(macros::lower; ctx.i()).await),
|
||||||
|
call([sym_ref(sym!(macros::resolve; ctx.i()).await), atom(macro_input)]),
|
||||||
|
]))
|
||||||
})])
|
})])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn dealias_mac_v(aliased: &MacTreeSeq, ctx: &ConstCtx, rep: &Reporter) -> MacTreeSeq {
|
pub async fn dealias_mac_v(aliased: Vec<MacTree>, ctx: &ConstCtx, rep: &Reporter) -> Vec<MacTree> {
|
||||||
let keys = aliased.glossary().iter().cloned().collect_vec();
|
let keys = glossary_v(&aliased).collect_vec();
|
||||||
let mut names: HashMap<_, _> = HashMap::new();
|
let mut names: HashMap<_, _> = HashMap::new();
|
||||||
let mut stream = pin!(ctx.names(&keys).zip(stream::iter(&keys)));
|
let mut stream = pin!(ctx.names(&keys).zip(stream::iter(&keys)));
|
||||||
while let Some((canonical, local)) = stream.next().await {
|
while let Some((canonical, local)) = stream.next().await {
|
||||||
@@ -63,13 +62,13 @@ pub async fn dealias_mac_v(aliased: &MacTreeSeq, ctx: &ConstCtx, rep: &Reporter)
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
aliased.map(&mut false, &mut |tree| match &*tree.tok {
|
map_mactree_v(&aliased, &mut false, &mut |tree| match &*tree.tok {
|
||||||
MacTok::Name(n) => names.get(n).map(|new_n| MacTok::Name(new_n.clone()).at(tree.pos())),
|
MacTok::Name(n) => names.get(n).map(|new_n| MacTok::Name(new_n.clone()).at(tree.pos())),
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> MacTreeSeq {
|
pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> Vec<MacTree> {
|
||||||
if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) {
|
if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) {
|
||||||
let (head, lambda) = line.split_at(idx as u32);
|
let (head, lambda) = line.split_at(idx as u32);
|
||||||
let (_, body) = lambda.pop_front().unwrap();
|
let (_, body) = lambda.pop_front().unwrap();
|
||||||
@@ -84,14 +83,14 @@ pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> MacTreeSeq {
|
|||||||
.await,
|
.await,
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
MacTreeSeq::new(all)
|
all
|
||||||
} else {
|
} else {
|
||||||
MacTreeSeq::new(parse_tokv_no_lambdas(&line, ctx).await)
|
parse_tokv_no_lambdas(&line, ctx).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn parse_tokv_no_lambdas(line: &[PTokTree], ctx: &impl ParseCtx) -> Vec<MacTree> {
|
async fn parse_tokv_no_lambdas(line: &[PTokTree], ctx: &impl ParseCtx) -> Vec<MacTree> {
|
||||||
stream::iter(line).filter_map(|tt| parse_tok(tt, ctx)).collect::<Vec<_>>().await
|
stream::iter(line).filter_map(|tt| parse_tok(tt, ctx)).collect().await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree> {
|
pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree> {
|
||||||
@@ -111,10 +110,7 @@ pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree>
|
|||||||
return parse_tok(nested, ctx).boxed_local().await;
|
return parse_tok(nested, ctx).boxed_local().await;
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
PTok::Handle(expr) => match TAtom::<PhAtom>::try_from_expr(expr.clone()).await {
|
PTok::Handle(expr) => MacTok::Value(expr.clone()),
|
||||||
Err(_) => MacTok::Value(expr.clone()),
|
|
||||||
Ok(ta) => MacTok::Ph(ta.value.to_full().await),
|
|
||||||
},
|
|
||||||
PTok::NewExpr(never) => match *never {},
|
PTok::NewExpr(never) => match *never {},
|
||||||
PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"),
|
PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"),
|
||||||
PTok::S(p, body) =>
|
PTok::S(p, body) =>
|
||||||
|
|||||||
@@ -1,65 +1,83 @@
|
|||||||
|
use hashbrown::HashMap;
|
||||||
|
use itertools::Itertools;
|
||||||
|
use orchid_base::error::Reporter;
|
||||||
use orchid_base::sym;
|
use orchid_base::sym;
|
||||||
use orchid_extension::atom::TAtom;
|
use orchid_extension::atom::TypAtom;
|
||||||
use orchid_extension::atom_owned::own;
|
use orchid_extension::atom_owned::own;
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::conv::ToExpr;
|
use orchid_extension::conv::ToExpr;
|
||||||
use orchid_extension::gen_expr::{call, sym_ref};
|
use orchid_extension::coroutine_exec::exec;
|
||||||
use orchid_extension::tree::{GenMember, fun, prefix};
|
use orchid_extension::gen_expr::{atom, call, sym_ref};
|
||||||
|
use orchid_extension::reflection::{ReflMemKind, refl};
|
||||||
|
use orchid_extension::tree::{GenMember, comments, fun, prefix};
|
||||||
|
use substack::Substack;
|
||||||
|
|
||||||
use crate::macros::mactree::MacTree;
|
use crate::Int;
|
||||||
use crate::macros::resolve::resolve;
|
use crate::macros::instantiate_tpl::InstantiateTplCall;
|
||||||
use crate::macros::utils::{build_macro, mactree, mactreev};
|
use crate::macros::macro_line::{Macro, Matcher};
|
||||||
|
use crate::macros::mactree::{LowerCtx, MacTree};
|
||||||
|
use crate::macros::recur_state::RecurState;
|
||||||
|
use crate::macros::resolve::{ResolveCtx, resolve};
|
||||||
|
|
||||||
pub async fn gen_macro_lib() -> Vec<GenMember> {
|
pub fn gen_macro_lib() -> Vec<GenMember> {
|
||||||
prefix("macros", [
|
prefix("macros", [
|
||||||
fun(true, "resolve", async |tpl: TAtom<MacTree>| resolve(own(&tpl).await).await),
|
comments(
|
||||||
// TODO test whether any of this worked
|
["This is an internal function, you can't obtain a value of its argument type.", "hidden"],
|
||||||
prefix("common", [
|
fun(true, "instantiate_tpl", |tpl: TypAtom<MacTree>, right: Int| async move {
|
||||||
build_macro(None, ["..", "_"]).finish(),
|
InstantiateTplCall {
|
||||||
build_macro(Some(1), ["+"])
|
tpl: own(tpl).await,
|
||||||
.rule(mactreev!("...$" lhs 0 macros::common::+ "...$" rhs 1), [async |[lhs, rhs]| {
|
argc: right.0.try_into().unwrap(),
|
||||||
call(sym_ref(sym!(std::number::add; i())), [resolve(lhs).await, resolve(rhs).await])
|
argv: Vec::new(),
|
||||||
}])
|
}
|
||||||
.finish(),
|
}),
|
||||||
build_macro(Some(2), ["*"])
|
),
|
||||||
.rule(mactreev!("...$" lhs 0 macros::common::* "...$" rhs 1), [async |[lhs, rhs]| {
|
fun(true, "resolve", |tpl: TypAtom<MacTree>| async move {
|
||||||
call(sym_ref(sym!(std::number::mul; i())), [resolve(lhs).await, resolve(rhs).await])
|
call([
|
||||||
}])
|
sym_ref(sym!(macros::resolve_recur; tpl.untyped.ctx().i()).await),
|
||||||
.finish(),
|
atom(RecurState::Bottom),
|
||||||
build_macro(None, ["comma_list", ","])
|
tpl.untyped.ex().to_expr().await,
|
||||||
.rule(
|
|
||||||
mactreev!(macros::common::comma_list ( "...$" head 0 macros::common::, "...$" tail 1)),
|
|
||||||
[async |[head, tail]| {
|
|
||||||
call(sym_ref(sym!(std::tuple::cat; i())), [
|
|
||||||
call(sym_ref(sym!(std::tuple::one; i())), [head.to_gen().await]),
|
|
||||||
resolve(mactree!(macros::common::comma_list "push" tail ;)).await,
|
|
||||||
])
|
])
|
||||||
}],
|
}),
|
||||||
)
|
fun(true, "lower", |tpl: TypAtom<MacTree>| async move {
|
||||||
.rule(mactreev!(macros::common::comma_list ( "...$" final_tail 0 )), [async |[tail]| {
|
let ctx = LowerCtx { sys: tpl.untyped.ctx().clone(), rep: &Reporter::new() };
|
||||||
call(sym_ref(sym!(std::tuple::one; i())), [tail.to_gen().await])
|
let res = own(tpl).await.lower(ctx, Substack::Bottom).await;
|
||||||
}])
|
if let Some(e) = Reporter::new().res() { Err(e) } else { Ok(res) }
|
||||||
.rule(mactreev!(macros::common::comma_list()), [async |[]| {
|
}),
|
||||||
sym_ref(sym!(std::tuple::empty; i()))
|
fun(true, "resolve_recur", |state: TypAtom<RecurState>, tpl: TypAtom<MacTree>| async move {
|
||||||
}])
|
exec("macros::resolve_recur", async move |mut h| {
|
||||||
.finish(),
|
let ctx = tpl.ctx().clone();
|
||||||
build_macro(None, ["semi_list", ";"])
|
let root = refl(&ctx);
|
||||||
.rule(
|
let tpl = own(tpl.clone()).await;
|
||||||
mactreev!(macros::common::semi_list ( "...$" head 0 macros::common::; "...$" tail 1)),
|
let mut macros = HashMap::new();
|
||||||
[async |[head, tail]| {
|
for n in tpl.glossary() {
|
||||||
call(sym_ref(sym!(std::tuple::cat; i())), [
|
if let Ok(ReflMemKind::Const) = root.get_by_path(n).await.map(|m| m.kind()) {
|
||||||
call(sym_ref(sym!(std::tuple::one; i())), [resolve(head).await]),
|
let Ok(mac) = h.exec::<TypAtom<Macro>>(sym_ref(n.clone())).await else { continue };
|
||||||
resolve(mactree!(macros::common::semi_list "push" tail ;)).await,
|
let mac = own(mac).await;
|
||||||
])
|
macros.entry(mac.0.own_kws[0].clone()).or_insert(mac);
|
||||||
}],
|
}
|
||||||
)
|
}
|
||||||
.rule(mactreev!(macros::common::semi_list ( "...$" final_tail 0 )), [async |[tail]| {
|
let mut named = HashMap::new();
|
||||||
call(sym_ref(sym!(std::tuple::one; i())), [resolve(tail).await])
|
let mut priod = Vec::new();
|
||||||
}])
|
for (_, mac) in macros.iter() {
|
||||||
.rule(mactreev!(macros::common::semi_list()), [async |[]| {
|
for rule in mac.0.rules.iter() {
|
||||||
sym_ref(sym!(std::tuple::empty; i()))
|
if rule.glossary.is_subset(tpl.glossary()) {
|
||||||
}])
|
match &rule.pattern {
|
||||||
.finish(),
|
Matcher::Named(m) =>
|
||||||
]),
|
named.entry(m.head()).or_insert(Vec::new()).push((m, mac, rule)),
|
||||||
|
Matcher::Priod(p) => priod.push((mac.0.prio, (p, mac, rule))),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let priod = priod.into_iter().sorted_unstable_by_key(|(p, _)| *p).map(|(_, r)| r).collect();
|
||||||
|
let mut rctx = ResolveCtx { h, recur: own(state).await, ctx: ctx.clone(), named, priod };
|
||||||
|
let resolve_res = resolve(&mut rctx, &tpl).await;
|
||||||
|
std::mem::drop(rctx);
|
||||||
|
match resolve_res {
|
||||||
|
Some(out_tree) => out_tree.to_expr().await,
|
||||||
|
None => tpl.to_expr().await,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
}),
|
||||||
])
|
])
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,26 +1,32 @@
|
|||||||
|
use std::borrow::Cow;
|
||||||
use std::cell::RefCell;
|
use std::cell::RefCell;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use async_once_cell::OnceCell;
|
use async_once_cell::OnceCell;
|
||||||
use futures::{StreamExt, stream};
|
use futures::{StreamExt, stream};
|
||||||
|
use hashbrown::{HashMap, HashSet};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::{OrcRes, Reporter, mk_errv};
|
use never::Never;
|
||||||
|
use orchid_base::error::{OrcRes, Reporter, mk_err};
|
||||||
|
use orchid_base::interner::Tok;
|
||||||
|
use orchid_base::location::Pos;
|
||||||
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::parse::{
|
use orchid_base::parse::{
|
||||||
Comment, ParseCtx, Parsed, Snippet, expect_end, expect_tok, line_items, token_errv,
|
Comment, ParseCtx, Parsed, Snippet, expect_end, expect_tok, line_items, token_errv,
|
||||||
try_pop_no_fluff,
|
try_pop_no_fluff,
|
||||||
};
|
};
|
||||||
use orchid_base::tree::{Paren, Token};
|
use orchid_base::tree::{Paren, Token};
|
||||||
use orchid_base::{clone, sym};
|
use orchid_base::{clone, sym};
|
||||||
use orchid_extension::atom::TAtom;
|
use orchid_extension::atom::{Atomic, TypAtom};
|
||||||
use orchid_extension::context::i;
|
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
|
||||||
use orchid_extension::conv::{ToExpr, TryFromExpr};
|
use orchid_extension::conv::{ToExpr, TryFromExpr};
|
||||||
use orchid_extension::gen_expr::{atom, call, sym_ref};
|
use orchid_extension::gen_expr::{atom, call, sym_ref};
|
||||||
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
|
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
|
||||||
|
|
||||||
use crate::macros::let_line::{dealias_mac_v, parse_tokv};
|
use crate::macros::let_line::{dealias_mac_v, parse_tokv};
|
||||||
use crate::macros::macro_value::{Macro, MacroData, Rule};
|
use crate::macros::mactree::{glossary_v, map_mactree_v};
|
||||||
use crate::macros::mactree::MacTreeSeq;
|
use crate::macros::recur_state::{RecurState, RulePath};
|
||||||
use crate::macros::rule::matcher::Matcher;
|
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
|
||||||
use crate::{Int, MacTok};
|
use crate::{Int, MacTok};
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
@@ -34,7 +40,7 @@ impl Parser for MacroLine {
|
|||||||
line: PSnippet<'a>,
|
line: PSnippet<'a>,
|
||||||
) -> OrcRes<Vec<ParsedLine>> {
|
) -> OrcRes<Vec<ParsedLine>> {
|
||||||
if exported {
|
if exported {
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.i().i("macros are always exported").await,
|
ctx.i().i("macros are always exported").await,
|
||||||
"The export keyword is forbidden here to avoid confusion\n\
|
"The export keyword is forbidden here to avoid confusion\n\
|
||||||
because macros are exported by default",
|
because macros are exported by default",
|
||||||
@@ -42,46 +48,42 @@ impl Parser for MacroLine {
|
|||||||
));
|
));
|
||||||
}
|
}
|
||||||
let module = ctx.module();
|
let module = ctx.module();
|
||||||
let Parsed { output: prio_or_body, tail } = try_pop_no_fluff(&ctx, line).await?;
|
let Parsed { output, tail } = try_pop_no_fluff(&ctx, line).await?;
|
||||||
let bad_first_item_err = || {
|
let bad_first_item_err = || {
|
||||||
token_errv(&ctx, prio_or_body, "Expected priority or block", |s| {
|
token_errv(&ctx, output, "Expected priority or block", |s| {
|
||||||
format!("Expected a priority number or a () block, found {s}")
|
format!("Expected a priority number or a () block, found {s}")
|
||||||
})
|
})
|
||||||
};
|
};
|
||||||
let (prio, body) = match &prio_or_body.tok {
|
let (prio, body) = match &output.tok {
|
||||||
Token::S(Paren::Round, body) => {
|
Token::S(Paren::Round, body) => (None, body),
|
||||||
expect_end(&ctx, tail).await?;
|
Token::Handle(expr) => match TypAtom::<Int>::try_from_expr(expr.clone()).await {
|
||||||
(None, body)
|
|
||||||
},
|
|
||||||
Token::Handle(expr) => match TAtom::<Int>::try_from_expr(expr.clone()).await {
|
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
return Err(e + bad_first_item_err().await);
|
return Err(e + bad_first_item_err().await);
|
||||||
},
|
},
|
||||||
Ok(prio) => {
|
Ok(prio) => {
|
||||||
let Parsed { output: body, tail } = try_pop_no_fluff(&ctx, tail).await?;
|
let Token::S(Paren::Round, block) = &output.tok else {
|
||||||
let Token::S(Paren::Round, block) = &body.tok else {
|
|
||||||
return Err(
|
return Err(
|
||||||
token_errv(&ctx, prio_or_body, "Expected () block", |s| {
|
token_errv(&ctx, output, "Expected () block", |s| {
|
||||||
format!("Expected a () block, found {s}")
|
format!("Expected a () block, found {s}")
|
||||||
})
|
})
|
||||||
.await,
|
.await,
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
expect_end(&ctx, tail).await?;
|
|
||||||
(Some(prio), block)
|
(Some(prio), block)
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
_ => return Err(bad_first_item_err().await),
|
_ => return Err(bad_first_item_err().await),
|
||||||
};
|
};
|
||||||
let lines = line_items(&ctx, Snippet::new(prio_or_body, body)).await;
|
expect_end(&ctx, tail).await?;
|
||||||
|
let lines = line_items(&ctx, Snippet::new(output, body)).await;
|
||||||
let Some((kw_line, rule_lines)) = lines.split_first() else { return Ok(Vec::new()) };
|
let Some((kw_line, rule_lines)) = lines.split_first() else { return Ok(Vec::new()) };
|
||||||
let mut keywords = Vec::new();
|
let mut keywords = HashMap::new();
|
||||||
let Parsed { tail: kw_tail, .. } =
|
let Parsed { tail: kw_tail, .. } =
|
||||||
expect_tok(&ctx, kw_line.tail, ctx.i().i("keywords").await).await?;
|
expect_tok(&ctx, kw_line.tail, ctx.i().i("keywords").await).await?;
|
||||||
for kw_tok in kw_tail.iter().filter(|kw| !kw.is_fluff()) {
|
for kw_tok in kw_tail.iter().filter(|kw| !kw.is_fluff()) {
|
||||||
match kw_tok.as_name() {
|
match kw_tok.as_name() {
|
||||||
Some(kw) => {
|
Some(kw) => {
|
||||||
keywords.push((kw, kw_tok.sr()));
|
keywords.insert(kw, kw_tok.sr());
|
||||||
},
|
},
|
||||||
None => ctx.rep().report(
|
None => ctx.rep().report(
|
||||||
token_errv(&ctx, kw_tok, "invalid macro keywords list", |tok| {
|
token_errv(&ctx, kw_tok, "invalid macro keywords list", |tok| {
|
||||||
@@ -91,8 +93,8 @@ impl Parser for MacroLine {
|
|||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let Some((macro_name, _)) = keywords.first().cloned() else {
|
let Some(macro_name) = keywords.keys().next().cloned() else {
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.i().i("macro with no keywords").await,
|
ctx.i().i("macro with no keywords").await,
|
||||||
"Macros must define at least one macro of their own.",
|
"Macros must define at least one macro of their own.",
|
||||||
[kw_line.tail.sr()],
|
[kw_line.tail.sr()],
|
||||||
@@ -101,12 +103,13 @@ impl Parser for MacroLine {
|
|||||||
let mut rules = Vec::new();
|
let mut rules = Vec::new();
|
||||||
let mut lines = Vec::new();
|
let mut lines = Vec::new();
|
||||||
for (idx, line) in rule_lines.iter().enumerate().map(|(n, v)| (n as u32, v)) {
|
for (idx, line) in rule_lines.iter().enumerate().map(|(n, v)| (n as u32, v)) {
|
||||||
|
let path = RulePath { module: module.clone(), main_kw: macro_name.clone(), rule: idx };
|
||||||
let sr = line.tail.sr();
|
let sr = line.tail.sr();
|
||||||
let name = ctx.i().i(&format!("rule::{}::{}", macro_name, idx)).await;
|
let name = ctx.i().i(&path.name()).await;
|
||||||
let Parsed { tail, .. } = expect_tok(&ctx, line.tail, ctx.i().i("rule").await).await?;
|
let Parsed { tail, .. } = expect_tok(&ctx, line.tail, ctx.i().i("rule").await).await?;
|
||||||
let arrow_token = ctx.i().i("=>").await;
|
let arrow_token = ctx.i().i("=>").await;
|
||||||
let Some((pattern, body)) = tail.split_once(|tok| tok.is_kw(arrow_token.clone())) else {
|
let Some((pattern, body)) = tail.split_once(|tok| tok.is_kw(arrow_token.clone())) else {
|
||||||
ctx.rep().report(mk_errv(
|
ctx.rep().report(mk_err(
|
||||||
ctx.i().i("Missing => in rule").await,
|
ctx.i().i("Missing => in rule").await,
|
||||||
"Rule lines are of the form `rule ...pattern => ...body`",
|
"Rule lines are of the form `rule ...pattern => ...body`",
|
||||||
[line.tail.sr()],
|
[line.tail.sr()],
|
||||||
@@ -115,7 +118,7 @@ impl Parser for MacroLine {
|
|||||||
};
|
};
|
||||||
let pattern = parse_tokv(pattern, &ctx).await;
|
let pattern = parse_tokv(pattern, &ctx).await;
|
||||||
let mut placeholders = Vec::new();
|
let mut placeholders = Vec::new();
|
||||||
pattern.map(&mut false, &mut |tok| {
|
map_mactree_v(&pattern, &mut false, &mut |tok| {
|
||||||
if let MacTok::Ph(ph) = tok.tok() {
|
if let MacTok::Ph(ph) = tok.tok() {
|
||||||
placeholders.push((ph.clone(), tok.pos()))
|
placeholders.push((ph.clone(), tok.pos()))
|
||||||
}
|
}
|
||||||
@@ -124,42 +127,52 @@ impl Parser for MacroLine {
|
|||||||
let mut body_mactree = parse_tokv(body, &ctx).await;
|
let mut body_mactree = parse_tokv(body, &ctx).await;
|
||||||
for (ph, ph_pos) in placeholders.iter().rev() {
|
for (ph, ph_pos) in placeholders.iter().rev() {
|
||||||
let name = ctx.module().suffix([ph.name.clone()], ctx.i()).await;
|
let name = ctx.module().suffix([ph.name.clone()], ctx.i()).await;
|
||||||
body_mactree =
|
body_mactree = vec![
|
||||||
MacTreeSeq::new([
|
MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone()),
|
||||||
MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone())
|
]
|
||||||
])
|
|
||||||
}
|
}
|
||||||
let body_sr = body.sr();
|
let body_sr = body.sr();
|
||||||
rules.push((name.clone(), placeholders, pattern));
|
rules.push((name.clone(), placeholders, rules.len() as u32, sr.pos(), pattern));
|
||||||
lines.push(ParsedLine::cnst(&sr, &line.output, true, name, async move |ctx| {
|
lines.push(ParsedLine::cnst(&sr, &line.output, true, name, async move |ctx| {
|
||||||
let rep = Reporter::new();
|
let rep = Reporter::new();
|
||||||
let body = dealias_mac_v(&body_mactree, &ctx, &rep).await;
|
let body = dealias_mac_v(body_mactree, &ctx, &rep).await;
|
||||||
let macro_input = MacTok::S(Paren::Round, body).at(body_sr.pos());
|
let macro_input = MacTok::S(Paren::Round, body).at(body_sr.pos());
|
||||||
if let Some(e) = rep.errv() {
|
if let Some(e) = rep.res() {
|
||||||
return Err(e);
|
return Err(e);
|
||||||
}
|
}
|
||||||
Ok(call(sym_ref(sym!(macros::resolve; i())), [macro_input.to_gen().await]))
|
Ok(call([
|
||||||
|
sym_ref(sym!(macros::resolve_recur; ctx.i()).await),
|
||||||
|
atom(RecurState::base(path)),
|
||||||
|
macro_input.to_expr().await,
|
||||||
|
]))
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
let mac_cell = Rc::new(OnceCell::new());
|
let mac_cell = Rc::new(OnceCell::new());
|
||||||
|
let keywords = Rc::new(keywords);
|
||||||
let rules = Rc::new(RefCell::new(Some(rules)));
|
let rules = Rc::new(RefCell::new(Some(rules)));
|
||||||
for (kw, sr) in &*keywords {
|
for (kw, sr) in &*keywords {
|
||||||
clone!(mac_cell, rules, module, prio);
|
clone!(mac_cell, keywords, rules, module, prio);
|
||||||
lines.push(ParsedLine::cnst(&sr.clone(), &comments, true, kw.clone(), async move |cctx| {
|
lines.push(ParsedLine::cnst(&sr.clone(), &comments, true, kw.clone(), async move |cctx| {
|
||||||
let mac = mac_cell
|
let mac = mac_cell
|
||||||
.get_or_init(async {
|
.get_or_init(async {
|
||||||
let rep = Reporter::new();
|
let rep = Reporter::new();
|
||||||
let rules = rules.borrow_mut().take().expect("once cell initializer runs");
|
let rules = rules.borrow_mut().take().expect("once cell initializer runs");
|
||||||
let rules = stream::iter(rules)
|
let rules = stream::iter(rules)
|
||||||
.then(|(body_name, placeholders, pattern_rel)| {
|
.then(|(body_name, placeholders, index, pos, pattern_macv)| {
|
||||||
let cctx = &cctx;
|
let cctx = &cctx;
|
||||||
let rep = &rep;
|
let rep = &rep;
|
||||||
|
let prio = &prio;
|
||||||
async move {
|
async move {
|
||||||
let pattern = dealias_mac_v(&pattern_rel, cctx, rep).await;
|
let pattern_abs = dealias_mac_v(pattern_macv, cctx, rep).await;
|
||||||
let pattern_res = Matcher::new(pattern.clone()).await;
|
let glossary = glossary_v(&pattern_abs).collect();
|
||||||
|
let pattern_res = match prio {
|
||||||
|
None => NamedMatcher::new(&pattern_abs, cctx.i()).await.map(Matcher::Named),
|
||||||
|
Some(_) => PriodMatcher::new(&pattern_abs, cctx.i()).await.map(Matcher::Priod),
|
||||||
|
};
|
||||||
let placeholders = placeholders.into_iter().map(|(ph, _)| ph.name).collect_vec();
|
let placeholders = placeholders.into_iter().map(|(ph, _)| ph.name).collect_vec();
|
||||||
match pattern_res {
|
match pattern_res {
|
||||||
Ok(matcher) => Some(Rule { body_name, matcher, pattern, placeholders }),
|
Ok(pattern) =>
|
||||||
|
Some(Rule { index, pos, body_name, pattern, glossary, placeholders }),
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
rep.report(e);
|
rep.report(e);
|
||||||
None
|
None
|
||||||
@@ -170,7 +183,8 @@ impl Parser for MacroLine {
|
|||||||
.flat_map(stream::iter)
|
.flat_map(stream::iter)
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.await;
|
.await;
|
||||||
Macro(Rc::new(MacroData { module, prio: prio.map(|i| i.0 as u64), rules }))
|
let own_kws = keywords.keys().cloned().collect_vec();
|
||||||
|
Macro(Rc::new(MacroData { module, prio: prio.map(|i| i.0 as u64), rules, own_kws }))
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
atom(mac.clone())
|
atom(mac.clone())
|
||||||
@@ -179,3 +193,36 @@ impl Parser for MacroLine {
|
|||||||
Ok(lines)
|
Ok(lines)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct MacroData {
|
||||||
|
pub module: Sym,
|
||||||
|
pub prio: Option<u64>,
|
||||||
|
pub rules: Vec<Rule>,
|
||||||
|
pub own_kws: Vec<IStr>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Macro(pub Rc<MacroData>);
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Rule {
|
||||||
|
pub index: u32,
|
||||||
|
pub pos: Pos,
|
||||||
|
pub pattern: Matcher,
|
||||||
|
pub glossary: HashSet<Sym>,
|
||||||
|
pub placeholders: Vec<IStr>,
|
||||||
|
pub body_name: IStr,
|
||||||
|
}
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum Matcher {
|
||||||
|
Named(NamedMatcher),
|
||||||
|
Priod(PriodMatcher),
|
||||||
|
}
|
||||||
|
impl Atomic for Macro {
|
||||||
|
type Data = ();
|
||||||
|
type Variant = OwnedVariant;
|
||||||
|
}
|
||||||
|
impl OwnedAtom for Macro {
|
||||||
|
type Refs = Never;
|
||||||
|
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,27 +1,22 @@
|
|||||||
use never::Never;
|
use never::Never;
|
||||||
|
use orchid_base::interner::Interner;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::reqnot::Receipt;
|
use orchid_base::reqnot::Receipt;
|
||||||
use orchid_base::sym;
|
|
||||||
use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
|
use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::entrypoint::ExtReq;
|
use orchid_extension::entrypoint::ExtReq;
|
||||||
use orchid_extension::lexer::LexerObj;
|
use orchid_extension::lexer::LexerObj;
|
||||||
use orchid_extension::other_system::SystemHandle;
|
use orchid_extension::other_system::SystemHandle;
|
||||||
use orchid_extension::parser::ParserObj;
|
use orchid_extension::parser::ParserObj;
|
||||||
use orchid_extension::system::{System, SystemCard};
|
use orchid_extension::system::{System, SystemCard};
|
||||||
use orchid_extension::system_ctor::SystemCtor;
|
use orchid_extension::system_ctor::SystemCtor;
|
||||||
use orchid_extension::tree::{GenMember, merge_trivial};
|
use orchid_extension::tree::GenMember;
|
||||||
|
|
||||||
use crate::macros::instantiate_tpl::InstantiateTplCall;
|
use crate::macros::instantiate_tpl::InstantiateTplCall;
|
||||||
use crate::macros::let_line::LetLine;
|
use crate::macros::let_line::LetLine;
|
||||||
use crate::macros::macro_lib::gen_macro_lib;
|
use crate::macros::macro_lib::gen_macro_lib;
|
||||||
use crate::macros::macro_line::MacroLine;
|
use crate::macros::macro_line::{Macro, MacroLine};
|
||||||
use crate::macros::macro_value::Macro;
|
|
||||||
use crate::macros::mactree_lexer::MacTreeLexer;
|
use crate::macros::mactree_lexer::MacTreeLexer;
|
||||||
use crate::macros::match_macros::gen_match_macro_lib;
|
use crate::macros::recur_state::RecurState;
|
||||||
use crate::macros::ph_lexer::{PhAtom, PhLexer};
|
|
||||||
use crate::macros::std_macros::gen_std_macro_lib;
|
|
||||||
use crate::macros::utils::MacroBodyArgCollector;
|
|
||||||
use crate::{MacTree, StdSystem};
|
use crate::{MacTree, StdSystem};
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
@@ -40,25 +35,15 @@ impl SystemCard for MacroSystem {
|
|||||||
[
|
[
|
||||||
Some(InstantiateTplCall::dynfo()),
|
Some(InstantiateTplCall::dynfo()),
|
||||||
Some(MacTree::dynfo()),
|
Some(MacTree::dynfo()),
|
||||||
|
Some(RecurState::dynfo()),
|
||||||
Some(Macro::dynfo()),
|
Some(Macro::dynfo()),
|
||||||
Some(PhAtom::dynfo()),
|
|
||||||
Some(MacroBodyArgCollector::dynfo()),
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl System for MacroSystem {
|
impl System for MacroSystem {
|
||||||
async fn request(_: ExtReq<'_>, req: Never) -> Receipt<'_> { match req {} }
|
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} }
|
||||||
async fn prelude() -> Vec<Sym> {
|
async fn prelude(_: &Interner) -> Vec<Sym> { vec![] }
|
||||||
vec![
|
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer] }
|
||||||
sym!(macros::common::+; i()),
|
|
||||||
sym!(macros::common::*; i()),
|
|
||||||
sym!(macros::common::,; i()),
|
|
||||||
sym!(std::tuple::t; i()),
|
|
||||||
]
|
|
||||||
}
|
|
||||||
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer, &PhLexer] }
|
|
||||||
fn parsers() -> Vec<ParserObj> { vec![&LetLine, &MacroLine] }
|
fn parsers() -> Vec<ParserObj> { vec![&LetLine, &MacroLine] }
|
||||||
async fn env() -> Vec<GenMember> {
|
fn env() -> Vec<GenMember> { gen_macro_lib() }
|
||||||
merge_trivial([gen_macro_lib().await, gen_std_macro_lib().await, gen_match_macro_lib().await])
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,43 +0,0 @@
|
|||||||
use std::borrow::Cow;
|
|
||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use never::Never;
|
|
||||||
use orchid_base::interner::Tok;
|
|
||||||
use orchid_base::name::Sym;
|
|
||||||
use orchid_extension::atom::Atomic;
|
|
||||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
|
|
||||||
use crate::macros::mactree::MacTreeSeq;
|
|
||||||
use crate::macros::rule::matcher::Matcher;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct MacroData {
|
|
||||||
pub module: Sym,
|
|
||||||
pub prio: Option<u64>,
|
|
||||||
pub rules: Vec<Rule>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Macro(pub Rc<MacroData>);
|
|
||||||
impl Macro {
|
|
||||||
pub async fn canonical_name(&self) -> Sym {
|
|
||||||
self.0.module.suffix([self.0.rules[0].body_name.clone()], &i()).await
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Rule {
|
|
||||||
pub pattern: MacTreeSeq,
|
|
||||||
pub matcher: Matcher,
|
|
||||||
pub placeholders: Vec<Tok<String>>,
|
|
||||||
pub body_name: Tok<String>,
|
|
||||||
}
|
|
||||||
impl Atomic for Macro {
|
|
||||||
type Data = ();
|
|
||||||
type Variant = OwnedVariant;
|
|
||||||
}
|
|
||||||
impl OwnedAtom for Macro {
|
|
||||||
type Refs = Never;
|
|
||||||
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
|
|
||||||
}
|
|
||||||
@@ -5,9 +5,9 @@ use std::rc::Rc;
|
|||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use futures::future::join_all;
|
use futures::future::join_all;
|
||||||
use hashbrown::HashSet;
|
use hashbrown::HashSet;
|
||||||
use orchid_api_derive::Coding;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::OrcErrv;
|
use orchid_base::error::{OrcErrv, Reporter, mk_err};
|
||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants, fmt};
|
||||||
use orchid_base::interner::Tok;
|
use orchid_base::interner::Tok;
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
@@ -15,89 +15,16 @@ use orchid_base::tl_cache;
|
|||||||
use orchid_base::tree::{Paren, indent};
|
use orchid_base::tree::{Paren, indent};
|
||||||
use orchid_extension::atom::Atomic;
|
use orchid_extension::atom::Atomic;
|
||||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
|
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
|
||||||
|
use orchid_extension::conv::ToExpr;
|
||||||
use orchid_extension::expr::Expr;
|
use orchid_extension::expr::Expr;
|
||||||
|
use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref};
|
||||||
|
use orchid_extension::system::SysCtx;
|
||||||
|
use substack::Substack;
|
||||||
|
|
||||||
fn union_rc_sets(seq: impl IntoIterator<Item = Rc<HashSet<Sym>>>) -> Rc<HashSet<Sym>> {
|
#[derive(Clone)]
|
||||||
let mut acc = Rc::<HashSet<Sym>>::default();
|
pub struct LowerCtx<'a> {
|
||||||
for right in seq {
|
pub sys: SysCtx,
|
||||||
if acc.is_empty() {
|
pub rep: &'a Reporter,
|
||||||
acc = right;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if right.is_empty() {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
acc = match (Rc::try_unwrap(acc), Rc::try_unwrap(right)) {
|
|
||||||
(Ok(mut left), Ok(right)) => {
|
|
||||||
left.extend(right);
|
|
||||||
Rc::new(left)
|
|
||||||
},
|
|
||||||
(Ok(mut owned), Err(borrowed)) | (Err(borrowed), Ok(mut owned)) => {
|
|
||||||
owned.extend(borrowed.iter().cloned());
|
|
||||||
Rc::new(owned)
|
|
||||||
},
|
|
||||||
(Err(left), Err(right)) => Rc::new(left.union(&right).cloned().collect()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
acc
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct MacTreeSeq {
|
|
||||||
pub items: Rc<Vec<MacTree>>,
|
|
||||||
pub top_glossary: Rc<HashSet<Sym>>,
|
|
||||||
pub glossary: Rc<HashSet<Sym>>,
|
|
||||||
}
|
|
||||||
impl MacTreeSeq {
|
|
||||||
pub fn new(i: impl IntoIterator<Item = MacTree>) -> Self {
|
|
||||||
let mut items = Vec::new();
|
|
||||||
let mut top_glossary = HashSet::new();
|
|
||||||
let mut glossary = HashSet::new();
|
|
||||||
for item in i {
|
|
||||||
glossary.extend(item.glossary().iter().cloned());
|
|
||||||
if let MacTok::Name(n) = item.tok() {
|
|
||||||
top_glossary.insert(n.clone());
|
|
||||||
}
|
|
||||||
items.push(item);
|
|
||||||
}
|
|
||||||
Self { items: Rc::new(items), top_glossary: Rc::new(top_glossary), glossary: Rc::new(glossary) }
|
|
||||||
}
|
|
||||||
pub fn map<F: FnMut(MacTree) -> Option<MacTree>>(&self, changed: &mut bool, map: &mut F) -> Self {
|
|
||||||
Self::new(self.items.iter().map(|tree| ro(changed, |changed| tree.map(changed, map))))
|
|
||||||
}
|
|
||||||
pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary }
|
|
||||||
pub fn concat(self, other: Self) -> Self {
|
|
||||||
if self.items.is_empty() {
|
|
||||||
return other;
|
|
||||||
} else if other.items.is_empty() {
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
let items = match (Rc::try_unwrap(self.items), Rc::try_unwrap(other.items)) {
|
|
||||||
(Ok(mut left), Ok(mut right)) => {
|
|
||||||
left.append(&mut right);
|
|
||||||
left
|
|
||||||
},
|
|
||||||
(Ok(mut left), Err(right)) => {
|
|
||||||
left.extend_from_slice(&right[..]);
|
|
||||||
left
|
|
||||||
},
|
|
||||||
(Err(left), Ok(mut right)) => {
|
|
||||||
right.splice(0..0, left.iter().cloned());
|
|
||||||
right
|
|
||||||
},
|
|
||||||
(Err(left), Err(right)) => left.iter().chain(&right[..]).cloned().collect(),
|
|
||||||
};
|
|
||||||
Self {
|
|
||||||
items: Rc::new(items),
|
|
||||||
top_glossary: union_rc_sets([self.top_glossary, other.top_glossary]),
|
|
||||||
glossary: union_rc_sets([self.glossary, other.glossary]),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl Format for MacTreeSeq {
|
|
||||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
|
||||||
mtreev_fmt(&self.items[..], c).await
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@@ -110,21 +37,48 @@ impl MacTree {
|
|||||||
pub fn tok(&self) -> &MacTok { &self.tok }
|
pub fn tok(&self) -> &MacTok { &self.tok }
|
||||||
pub fn pos(&self) -> Pos { self.pos.clone() }
|
pub fn pos(&self) -> Pos { self.pos.clone() }
|
||||||
pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary }
|
pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary }
|
||||||
pub fn map<F: FnMut(Self) -> Option<Self>>(&self, changed: &mut bool, map: &mut F) -> Self {
|
pub async fn lower(&self, ctx: LowerCtx<'_>, args: Substack<'_, Sym>) -> GExpr {
|
||||||
let tok = match map(self.clone()) {
|
let expr = match self.tok() {
|
||||||
Some(new_tok) => {
|
MacTok::Bottom(e) => bot(e.clone()),
|
||||||
*changed = true;
|
MacTok::Lambda(arg, body) => {
|
||||||
return new_tok;
|
let MacTok::Name(name) = &*arg.tok else {
|
||||||
},
|
let err = mk_err(
|
||||||
None => match &*self.tok {
|
ctx.sys.i().i("Syntax error after macros").await,
|
||||||
MacTok::Lambda(arg, body) =>
|
"This token ends up as a binding, consider replacing it with a name",
|
||||||
MacTok::Lambda(ro(changed, |changed| arg.map(changed, map)), body.map(changed, map)),
|
[arg.pos()],
|
||||||
MacTok::Name(_) | MacTok::Value(_) => return self.clone(),
|
);
|
||||||
MacTok::Slot | MacTok::Ph(_) | MacTok::Bottom(_) => return self.clone(),
|
ctx.rep.report(err.clone());
|
||||||
MacTok::S(p, body) => MacTok::S(*p, body.map(changed, map)),
|
return bot(err);
|
||||||
},
|
|
||||||
};
|
};
|
||||||
if *changed { tok.at(self.pos()) } else { self.clone() }
|
lambda(args.len() as u64, lower_v(body, ctx, args.push(name.clone())).await)
|
||||||
|
},
|
||||||
|
MacTok::Name(name) => match args.iter().enumerate().find(|(_, n)| *n == name) {
|
||||||
|
None => sym_ref(name.clone()),
|
||||||
|
Some((i, _)) => arg((args.len() - i) as u64),
|
||||||
|
},
|
||||||
|
MacTok::Ph(ph) => {
|
||||||
|
let err = mk_err(
|
||||||
|
ctx.sys.i().i("Placeholder in value").await,
|
||||||
|
format!("Placeholder {ph} is only supported in macro patterns"),
|
||||||
|
[self.pos()],
|
||||||
|
);
|
||||||
|
ctx.rep.report(err.clone());
|
||||||
|
return bot(err);
|
||||||
|
},
|
||||||
|
MacTok::S(Paren::Round, body) => call(lower_v(body, ctx, args).await),
|
||||||
|
MacTok::S(..) => {
|
||||||
|
let err = mk_err(
|
||||||
|
ctx.sys.i().i("[] or {} after macros").await,
|
||||||
|
format!("{} didn't match any macro", fmt(self, ctx.sys.i()).await),
|
||||||
|
[self.pos()],
|
||||||
|
);
|
||||||
|
ctx.rep.report(err.clone());
|
||||||
|
return bot(err);
|
||||||
|
},
|
||||||
|
MacTok::Slot => panic!("Uninstantiated template should never be exposed"),
|
||||||
|
MacTok::Value(v) => v.clone().to_expr().await,
|
||||||
|
};
|
||||||
|
expr.at(self.pos())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Atomic for MacTree {
|
impl Atomic for MacTree {
|
||||||
@@ -136,8 +90,7 @@ impl OwnedAtom for MacTree {
|
|||||||
|
|
||||||
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
|
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
|
||||||
async fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("'{0}")))
|
self.tok.print(c).await
|
||||||
.units([self.tok.print(c).await])
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Format for MacTree {
|
impl Format for MacTree {
|
||||||
@@ -146,49 +99,57 @@ impl Format for MacTree {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn lower_v(v: &[MacTree], ctx: LowerCtx<'_>, args: Substack<'_, Sym>) -> Vec<GExpr> {
|
||||||
|
join_all(v.iter().map(|t| t.lower(ctx.clone(), args.clone())).collect::<Vec<_>>()).await
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum MacTok {
|
pub enum MacTok {
|
||||||
S(Paren, MacTreeSeq),
|
S(Paren, Vec<MacTree>),
|
||||||
Name(Sym),
|
Name(Sym),
|
||||||
/// Only permitted in arguments to `instantiate_tpl`
|
/// Only permitted in arguments to `instantiate_tpl`
|
||||||
Slot,
|
Slot,
|
||||||
Value(Expr),
|
Value(Expr),
|
||||||
Lambda(MacTree, MacTreeSeq),
|
Lambda(MacTree, Vec<MacTree>),
|
||||||
/// Only permitted in "pattern" values produced by macro blocks, which are
|
/// Only permitted in "pattern" values produced by macro blocks, which are
|
||||||
/// never accessed as variables by usercode
|
/// never accessed as variables by usercode
|
||||||
Ph(Ph),
|
Ph(Ph),
|
||||||
Bottom(OrcErrv),
|
Bottom(OrcErrv),
|
||||||
}
|
}
|
||||||
impl MacTok {
|
impl MacTok {
|
||||||
pub fn build_glossary(&self) -> Rc<HashSet<Sym>> {
|
pub fn build_glossary(&self) -> HashSet<Sym> {
|
||||||
match self {
|
match self {
|
||||||
MacTok::Bottom(_) | MacTok::Ph(_) | MacTok::Slot | MacTok::Value(_) => Rc::default(),
|
MacTok::Bottom(_) | MacTok::Ph(_) | MacTok::Slot | MacTok::Value(_) => HashSet::new(),
|
||||||
MacTok::Name(sym) => Rc::new(HashSet::from([sym.clone()])),
|
MacTok::Name(sym) => HashSet::from([sym.clone()]),
|
||||||
MacTok::S(_, body) => union_rc_sets(body.items.iter().map(|mt| mt.glossary.clone())),
|
MacTok::S(_, body) => body.iter().flat_map(|mt| &*mt.glossary).cloned().collect(),
|
||||||
MacTok::Lambda(arg, body) =>
|
MacTok::Lambda(arg, body) =>
|
||||||
union_rc_sets(body.items.iter().chain([arg]).map(|mt| mt.glossary.clone())),
|
body.iter().chain([arg]).flat_map(|mt| &*mt.glossary).cloned().collect(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn at(self, pos: impl Into<Pos>) -> MacTree {
|
pub fn at(self, pos: impl Into<Pos>) -> MacTree {
|
||||||
MacTree { pos: pos.into(), glossary: self.build_glossary(), tok: Rc::new(self) }
|
MacTree { pos: pos.into(), glossary: Rc::new(self.build_glossary()), tok: Rc::new(self) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Format for MacTok {
|
impl Format for MacTok {
|
||||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
match self {
|
match self {
|
||||||
Self::Value(v) => v.print(c).await,
|
Self::Value(v) => v.print(c).await,
|
||||||
Self::Lambda(arg, b) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
Self::Lambda(arg, b) => FmtUnit::new(
|
||||||
.unbounded("\\{0} {1l}")
|
tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
||||||
.bounded("(\\{0} {1b})")))
|
.unbounded("\\{0b}.{1l}")
|
||||||
.units([arg.print(c).boxed_local().await, b.print(c).await]),
|
.bounded("(\\{0b}.{1b})"))),
|
||||||
|
[arg.print(c).boxed_local().await, mtreev_fmt(b, c).await],
|
||||||
|
),
|
||||||
Self::Name(n) => format!("{n}").into(),
|
Self::Name(n) => format!("{n}").into(),
|
||||||
Self::Ph(ph) => format!("{ph}").into(),
|
Self::Ph(ph) => format!("{ph}").into(),
|
||||||
Self::S(p, body) => match *p {
|
Self::S(p, body) => FmtUnit::new(
|
||||||
Paren::Round => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("({0b})"))),
|
match *p {
|
||||||
Paren::Curly => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{{0b}}"))),
|
Paren::Round => Rc::new(Variants::default().bounded("({0b})")),
|
||||||
Paren::Square => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("[{0b}]"))),
|
Paren::Curly => Rc::new(Variants::default().bounded("{{0b}}")),
|
||||||
}
|
Paren::Square => Rc::new(Variants::default().bounded("[{0b}]")),
|
||||||
.units([body.print(c).await]),
|
},
|
||||||
|
[mtreev_fmt(body, c).await],
|
||||||
|
),
|
||||||
Self::Slot => "$SLOT".into(),
|
Self::Slot => "$SLOT".into(),
|
||||||
Self::Bottom(err) if err.len() == 1 => format!("Bottom({}) ", err.one().unwrap()).into(),
|
Self::Bottom(err) if err.len() == 1 => format!("Bottom({}) ", err.one().unwrap()).into(),
|
||||||
Self::Bottom(err) => format!("Botttom(\n{}) ", indent(&err.to_string())).into(),
|
Self::Bottom(err) => format!("Botttom(\n{}) ", indent(&err.to_string())).into(),
|
||||||
@@ -200,12 +161,12 @@ pub async fn mtreev_fmt<'b>(
|
|||||||
v: impl IntoIterator<Item = &'b MacTree>,
|
v: impl IntoIterator<Item = &'b MacTree>,
|
||||||
c: &(impl FmtCtx + ?Sized),
|
c: &(impl FmtCtx + ?Sized),
|
||||||
) -> FmtUnit {
|
) -> FmtUnit {
|
||||||
FmtUnit::sequence("", " ", "", None, join_all(v.into_iter().map(|t| t.print(c))).await)
|
FmtUnit::sequence(" ", None, join_all(v.into_iter().map(|t| t.print(c))).await)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||||
pub struct Ph {
|
pub struct Ph {
|
||||||
pub name: Tok<String>,
|
pub name: IStr,
|
||||||
pub kind: PhKind,
|
pub kind: PhKind,
|
||||||
}
|
}
|
||||||
impl Display for Ph {
|
impl Display for Ph {
|
||||||
@@ -220,12 +181,42 @@ impl Display for Ph {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Coding)]
|
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||||
pub enum PhKind {
|
pub enum PhKind {
|
||||||
Scalar,
|
Scalar,
|
||||||
Vector { at_least_one: bool, priority: u8 },
|
Vector { at_least_one: bool, priority: u8 },
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn map_mactree<F: FnMut(MacTree) -> Option<MacTree>>(
|
||||||
|
src: &MacTree,
|
||||||
|
changed: &mut bool,
|
||||||
|
map: &mut F,
|
||||||
|
) -> MacTree {
|
||||||
|
let tok = match map(src.clone()) {
|
||||||
|
Some(new_tok) => {
|
||||||
|
*changed = true;
|
||||||
|
return new_tok;
|
||||||
|
},
|
||||||
|
None => match &*src.tok {
|
||||||
|
MacTok::Lambda(arg, body) => MacTok::Lambda(
|
||||||
|
ro(changed, |changed| map_mactree(arg, changed, map)),
|
||||||
|
map_mactree_v(body, changed, map),
|
||||||
|
),
|
||||||
|
MacTok::Name(_) | MacTok::Value(_) => return src.clone(),
|
||||||
|
MacTok::Slot | MacTok::Ph(_) | MacTok::Bottom(_) => return src.clone(),
|
||||||
|
MacTok::S(p, body) => MacTok::S(*p, map_mactree_v(body, changed, map)),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
if *changed { tok.at(src.pos()) } else { src.clone() }
|
||||||
|
}
|
||||||
|
pub fn map_mactree_v<F: FnMut(MacTree) -> Option<MacTree>>(
|
||||||
|
src: &[MacTree],
|
||||||
|
changed: &mut bool,
|
||||||
|
map: &mut F,
|
||||||
|
) -> Vec<MacTree> {
|
||||||
|
src.iter().map(|tree| ro(changed, |changed| map_mactree(tree, changed, map))).collect_vec()
|
||||||
|
}
|
||||||
|
|
||||||
/// reverse "or". Inside, the flag is always false, but raising it will raise
|
/// reverse "or". Inside, the flag is always false, but raising it will raise
|
||||||
/// the outside flag too.
|
/// the outside flag too.
|
||||||
fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
|
fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
|
||||||
@@ -234,3 +225,7 @@ fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
|
|||||||
*flag |= new_flag;
|
*flag |= new_flag;
|
||||||
val
|
val
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn glossary_v(src: &[MacTree]) -> impl Iterator<Item = Sym> {
|
||||||
|
src.iter().flat_map(|mt| mt.glossary()).cloned()
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,44 +1,46 @@
|
|||||||
use std::ops::RangeInclusive;
|
use std::ops::RangeInclusive;
|
||||||
|
|
||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use itertools::chain;
|
use orchid_base::error::{OrcRes, mk_err};
|
||||||
use orchid_base::error::{OrcRes, mk_errv};
|
|
||||||
use orchid_base::parse::ParseCtx;
|
use orchid_base::parse::ParseCtx;
|
||||||
|
use orchid_base::sym;
|
||||||
use orchid_base::tokens::PARENS;
|
use orchid_base::tokens::PARENS;
|
||||||
use orchid_base::tree::Paren;
|
use orchid_base::tree::Paren;
|
||||||
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
|
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
|
||||||
use orchid_extension::parser::p_tree2gen;
|
use orchid_extension::parser::p_tree2gen;
|
||||||
use orchid_extension::tree::{GenTok, GenTokTree, x_tok};
|
use orchid_extension::tree::{GenTok, GenTokTree, ref_tok, x_tok};
|
||||||
|
|
||||||
use crate::macros::instantiate_tpl::InstantiateTplCall;
|
|
||||||
use crate::macros::let_line::parse_tok;
|
use crate::macros::let_line::parse_tok;
|
||||||
use crate::macros::mactree::{MacTok, MacTree, MacTreeSeq};
|
use crate::macros::mactree::{MacTok, MacTree};
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct MacTreeLexer;
|
pub struct MacTreeLexer;
|
||||||
impl Lexer for MacTreeLexer {
|
impl Lexer for MacTreeLexer {
|
||||||
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\''];
|
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\''];
|
||||||
async fn lex<'a>(tail: &'a str, lctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
||||||
let Some(tail2) = tail.strip_prefix('\'') else {
|
let Some(tail2) = tail.strip_prefix('\'') else {
|
||||||
return Err(err_not_applicable().await);
|
return Err(err_not_applicable(ctx.i()).await);
|
||||||
};
|
};
|
||||||
let tail3 = tail2.trim_start();
|
let tail3 = tail2.trim_start();
|
||||||
let mut args = Vec::new();
|
let mut args = Vec::new();
|
||||||
return match mac_tree(tail3, &mut args, lctx).await {
|
return match mac_tree(tail3, &mut args, ctx).await {
|
||||||
Ok((tail4, mactree)) => {
|
Ok((tail4, mactree)) => {
|
||||||
let range = lctx.pos_tt(tail, tail4);
|
let range = ctx.pos_tt(tail, tail4);
|
||||||
let tok = match &args[..] {
|
let tok = match &args[..] {
|
||||||
[] => x_tok(mactree).await,
|
[] => x_tok(mactree).await,
|
||||||
_ => {
|
_ => {
|
||||||
let instantiate_tpl_call =
|
let call = ([
|
||||||
InstantiateTplCall { argc: args.len(), argv: vec![], tpl: mactree };
|
ref_tok(sym!(macros::instantiate_tpl; ctx.i()).await).await.at(range.clone()),
|
||||||
let call = chain!([x_tok(instantiate_tpl_call).await.at(range.clone())], args);
|
x_tok(mactree).await.at(range.clone()),
|
||||||
|
]
|
||||||
|
.into_iter())
|
||||||
|
.chain(args.into_iter());
|
||||||
GenTok::S(Paren::Round, call.collect())
|
GenTok::S(Paren::Round, call.collect())
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
Ok((tail4, tok.at(range)))
|
Ok((tail4, tok.at(range)))
|
||||||
},
|
},
|
||||||
Err(e) => Ok((tail2, GenTok::Bottom(e).at(lctx.pos_lt(1, tail2)))),
|
Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.pos_lt(1, tail2)))),
|
||||||
};
|
};
|
||||||
async fn mac_tree<'a>(
|
async fn mac_tree<'a>(
|
||||||
tail: &'a str,
|
tail: &'a str,
|
||||||
@@ -51,10 +53,9 @@ impl Lexer for MacTreeLexer {
|
|||||||
return loop {
|
return loop {
|
||||||
let tail2 = body_tail.trim_start();
|
let tail2 = body_tail.trim_start();
|
||||||
if let Some(tail3) = tail2.strip_prefix(*rp) {
|
if let Some(tail3) = tail2.strip_prefix(*rp) {
|
||||||
let tok = MacTok::S(*paren, MacTreeSeq::new(items));
|
break Ok((tail3, MacTok::S(*paren, items).at(ctx.pos_tt(tail, tail3).pos())));
|
||||||
break Ok((tail3, tok.at(ctx.pos_tt(tail, tail3).pos())));
|
|
||||||
} else if tail2.is_empty() {
|
} else if tail2.is_empty() {
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
ctx.i().i("Unclosed block").await,
|
ctx.i().i("Unclosed block").await,
|
||||||
format!("Expected closing {rp}"),
|
format!("Expected closing {rp}"),
|
||||||
[ctx.pos_lt(1, tail)],
|
[ctx.pos_lt(1, tail)],
|
||||||
@@ -84,7 +85,7 @@ impl Lexer for MacTreeLexer {
|
|||||||
body.push(body_tok);
|
body.push(body_tok);
|
||||||
tail3 = tail5;
|
tail3 = tail5;
|
||||||
}
|
}
|
||||||
Ok((tail3, MacTok::Lambda(param, MacTreeSeq::new(body)).at(ctx.pos_tt(tail, tail3).pos())))
|
Ok((tail3, MacTok::Lambda(param, body).at(ctx.pos_tt(tail, tail3).pos())))
|
||||||
} else {
|
} else {
|
||||||
let (tail2, sub) = ctx.recurse(tail).await?;
|
let (tail2, sub) = ctx.recurse(tail).await?;
|
||||||
let parsed = parse_tok(&sub, ctx).await.expect("Unexpected invalid token");
|
let parsed = parse_tok(&sub, ctx).await.expect("Unexpected invalid token");
|
||||||
|
|||||||
@@ -1,178 +0,0 @@
|
|||||||
use std::borrow::Cow;
|
|
||||||
|
|
||||||
use async_fn_stream::stream;
|
|
||||||
use futures::future::join_all;
|
|
||||||
use futures::{Stream, StreamExt, stream};
|
|
||||||
use never::Never;
|
|
||||||
use orchid_api::ExprTicket;
|
|
||||||
use orchid_api_derive::Coding;
|
|
||||||
use orchid_base::error::{OrcRes, mk_errv};
|
|
||||||
use orchid_base::format::fmt;
|
|
||||||
use orchid_base::name::Sym;
|
|
||||||
use orchid_base::sym;
|
|
||||||
use orchid_extension::atom::{Atomic, TAtom};
|
|
||||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::conv::ToExpr;
|
|
||||||
use orchid_extension::coroutine_exec::{ExecHandle, exec};
|
|
||||||
use orchid_extension::expr::{Expr, ExprHandle};
|
|
||||||
use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref};
|
|
||||||
use orchid_extension::tree::{GenMember, fun, prefix};
|
|
||||||
|
|
||||||
use crate::macros::resolve::resolve;
|
|
||||||
use crate::macros::utils::{build_macro, mactree, mactreev};
|
|
||||||
use crate::std::reflection::sym_atom::SymAtom;
|
|
||||||
use crate::std::tuple::Tuple;
|
|
||||||
use crate::{HomoTpl, MacTok, MacTree, OrcOpt, Tpl, UntypedTuple, api};
|
|
||||||
|
|
||||||
#[derive(Clone, Coding)]
|
|
||||||
pub struct MatcherData {
|
|
||||||
keys: Vec<api::TStrv>,
|
|
||||||
matcher: ExprTicket,
|
|
||||||
}
|
|
||||||
impl MatcherData {
|
|
||||||
async fn matcher(&self) -> Expr { Expr::from_handle(ExprHandle::from_ticket(self.matcher).await) }
|
|
||||||
pub async fn run_matcher(
|
|
||||||
&self,
|
|
||||||
h: &mut ExecHandle<'_>,
|
|
||||||
val: impl ToExpr,
|
|
||||||
) -> OrcRes<OrcOpt<HomoTpl<Expr>>> {
|
|
||||||
h.exec::<OrcOpt<HomoTpl<Expr>>>(call(self.matcher().await.to_gen().await, [val.to_gen().await]))
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
pub fn keys(&self) -> impl Stream<Item = Sym> {
|
|
||||||
stream(async |mut h| {
|
|
||||||
for tk in &self.keys {
|
|
||||||
h.emit(Sym::from_api(*tk, &i()).await).await
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct MatcherAtom {
|
|
||||||
/// The names that subresults may be bound to
|
|
||||||
pub(super) keys: Vec<Sym>,
|
|
||||||
/// Takes the value-to-be-matched, returns an `option (tuple T1..TN)` of the
|
|
||||||
/// subresults to be bound to the names returned by [Self::keys]
|
|
||||||
pub(super) matcher: Expr,
|
|
||||||
}
|
|
||||||
impl Atomic for MatcherAtom {
|
|
||||||
type Data = MatcherData;
|
|
||||||
type Variant = OwnedVariant;
|
|
||||||
}
|
|
||||||
impl OwnedAtom for MatcherAtom {
|
|
||||||
type Refs = Never;
|
|
||||||
async fn val(&self) -> std::borrow::Cow<'_, Self::Data> {
|
|
||||||
Cow::Owned(MatcherData {
|
|
||||||
keys: self.keys.iter().map(|t| t.to_api()).collect(),
|
|
||||||
matcher: self.matcher.handle().ticket(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn gen_match_macro_lib() -> Vec<GenMember> {
|
|
||||||
prefix("pattern", [
|
|
||||||
fun(
|
|
||||||
true,
|
|
||||||
"match_one",
|
|
||||||
async |mat: TAtom<MatcherAtom>, value: Expr, then: Expr, default: Expr| {
|
|
||||||
exec(async move |mut h| match mat.run_matcher(&mut h, value).await? {
|
|
||||||
OrcOpt(Some(values)) =>
|
|
||||||
Ok(call(then.to_gen().await, join_all(values.0.into_iter().map(|x| x.to_gen())).await)),
|
|
||||||
OrcOpt(None) => Ok(default.to_gen().await),
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
},
|
|
||||||
),
|
|
||||||
fun(true, "matcher", async |names: HomoTpl<TAtom<SymAtom>>, matcher: Expr| MatcherAtom {
|
|
||||||
keys: join_all(names.0.iter().map(async |atm| Sym::from_api(atm.0, &i()).await)).await,
|
|
||||||
matcher,
|
|
||||||
}),
|
|
||||||
build_macro(None, ["match", "match_rule", "_row", "=>"])
|
|
||||||
.rule(mactreev!("pattern::match" { "..$" rules 0 }), [async |[rules]| {
|
|
||||||
exec(async move |mut h| {
|
|
||||||
let rule_lines = h
|
|
||||||
.exec::<TAtom<Tuple>>(call(sym_ref(sym!(macros::resolve; i())), [
|
|
||||||
mactree!(macros::common::semi_list "push" rules.clone();).to_gen().await,
|
|
||||||
]))
|
|
||||||
.await?;
|
|
||||||
let mut rule_atoms = Vec::<(TAtom<MatcherAtom>, Expr)>::new();
|
|
||||||
for line_exprh in rule_lines.iter() {
|
|
||||||
let line_mac = h
|
|
||||||
.exec::<TAtom<MacTree>>(Expr::from_handle(ExprHandle::from_ticket(*line_exprh).await))
|
|
||||||
.await?;
|
|
||||||
let Tpl((matcher, body)) = h
|
|
||||||
.exec(call(sym_ref(sym!(macros::resolve; i())), [
|
|
||||||
mactree!(pattern::_row "push" own(&line_mac).await ;).to_gen().await,
|
|
||||||
]))
|
|
||||||
.await?;
|
|
||||||
rule_atoms.push((matcher, body));
|
|
||||||
}
|
|
||||||
let base_case = lambda(0, [bot(mk_errv(
|
|
||||||
i().i("No branches match").await,
|
|
||||||
"None of the pattern provided matches this value",
|
|
||||||
[rules.pos()],
|
|
||||||
))]);
|
|
||||||
let match_expr = stream::iter(rule_atoms.into_iter().rev())
|
|
||||||
.fold(base_case, async |tail, (mat, body)| {
|
|
||||||
lambda(0, [call(sym_ref(sym!(pattern::match_one; i())), [
|
|
||||||
mat.to_gen().await,
|
|
||||||
arg(0),
|
|
||||||
body.to_gen().await,
|
|
||||||
call(tail, [arg(0)]),
|
|
||||||
])])
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
Ok(match_expr)
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
}])
|
|
||||||
.rule(mactreev!(pattern::match_rule (( "...$" pattern 0 ))), [async |[pattern]| {
|
|
||||||
resolve(mactree!(pattern::match_rule "push" pattern; )).await
|
|
||||||
}])
|
|
||||||
.rule(mactreev!(pattern::_row ( "...$" pattern 0 pattern::=> "...$" value 1 )), [
|
|
||||||
async |[pattern, mut value]| {
|
|
||||||
exec(async move |mut h| -> OrcRes<Tpl<(TAtom<MatcherAtom>, GExpr)>> {
|
|
||||||
let Ok(pat) = h
|
|
||||||
.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
|
|
||||||
mactree!(pattern::match_rule "push" pattern.clone();).to_gen().await,
|
|
||||||
]))
|
|
||||||
.await
|
|
||||||
else {
|
|
||||||
return Err(mk_errv(
|
|
||||||
i().i("Invalid pattern").await,
|
|
||||||
format!("Could not parse {} as a match pattern", fmt(&pattern, &i()).await),
|
|
||||||
[pattern.pos()],
|
|
||||||
));
|
|
||||||
};
|
|
||||||
value = (pat.keys())
|
|
||||||
.fold(value, async |value, name| mactree!("l_" name; ( "push" value ; )))
|
|
||||||
.await;
|
|
||||||
Ok(Tpl((pat, resolve(value).await)))
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
},
|
|
||||||
])
|
|
||||||
.finish(),
|
|
||||||
fun(true, "ref_body", async |val| OrcOpt(Some(UntypedTuple(vec![val])))),
|
|
||||||
build_macro(None, ["ref"])
|
|
||||||
.rule(mactreev!(pattern::match_rule(pattern::ref "$" name)), [async |[name]| {
|
|
||||||
let MacTok::Name(name) = name.tok() else {
|
|
||||||
return Err(mk_errv(
|
|
||||||
i().i("pattern 'ref' requires a name to bind to").await,
|
|
||||||
format!(
|
|
||||||
"'ref' was interpreted as a binding matcher, \
|
|
||||||
but it was followed by {} instead of a name",
|
|
||||||
fmt(&name, &i()).await
|
|
||||||
),
|
|
||||||
[name.pos()],
|
|
||||||
));
|
|
||||||
};
|
|
||||||
Ok(MatcherAtom {
|
|
||||||
keys: vec![name.clone()],
|
|
||||||
matcher: sym_ref(sym!(pattern::ref_body; i())).to_expr().await,
|
|
||||||
})
|
|
||||||
}])
|
|
||||||
.finish(),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
@@ -3,14 +3,10 @@ mod let_line;
|
|||||||
mod macro_lib;
|
mod macro_lib;
|
||||||
mod macro_line;
|
mod macro_line;
|
||||||
pub mod macro_system;
|
pub mod macro_system;
|
||||||
mod macro_value;
|
|
||||||
pub mod mactree;
|
pub mod mactree;
|
||||||
mod mactree_lexer;
|
mod mactree_lexer;
|
||||||
pub mod match_macros;
|
pub mod recur_state;
|
||||||
mod ph_lexer;
|
|
||||||
mod resolve;
|
mod resolve;
|
||||||
mod rule;
|
mod rule;
|
||||||
pub mod std_macros;
|
|
||||||
mod utils;
|
|
||||||
|
|
||||||
use mactree::{MacTok, MacTree};
|
use mactree::{MacTok, MacTree};
|
||||||
|
|||||||
@@ -1,77 +0,0 @@
|
|||||||
use orchid_api_derive::Coding;
|
|
||||||
use orchid_base::error::{OrcRes, mk_errv};
|
|
||||||
use orchid_base::format::FmtUnit;
|
|
||||||
use orchid_base::parse::{name_char, name_start};
|
|
||||||
use orchid_extension::atom::Atomic;
|
|
||||||
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
|
|
||||||
use orchid_extension::tree::{GenTokTree, x_tok};
|
|
||||||
|
|
||||||
use crate::macros::mactree::{Ph, PhKind};
|
|
||||||
|
|
||||||
#[derive(Clone, Coding)]
|
|
||||||
pub struct PhAtom(orchid_api::TStr, PhKind);
|
|
||||||
impl PhAtom {
|
|
||||||
pub async fn to_full(&self) -> Ph { Ph { kind: self.1, name: i().ex(self.0).await } }
|
|
||||||
}
|
|
||||||
impl Atomic for PhAtom {
|
|
||||||
type Data = Self;
|
|
||||||
type Variant = ThinVariant;
|
|
||||||
}
|
|
||||||
impl ThinAtom for PhAtom {
|
|
||||||
async fn print(&self) -> FmtUnit {
|
|
||||||
Ph { name: i().ex(self.0).await, kind: self.1 }.to_string().into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct PhLexer;
|
|
||||||
impl Lexer for PhLexer {
|
|
||||||
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['$'..='$', '.'..='.'];
|
|
||||||
async fn lex<'a>(line: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
|
||||||
let (tail, name, phkind) = if let Some(tail) = line.strip_prefix("$")
|
|
||||||
&& tail.starts_with(name_start)
|
|
||||||
{
|
|
||||||
let name = tail.split_once(|c| !name_char(c)).map_or("", |(h, _)| h);
|
|
||||||
let tail = tail.split_at(name.len()).1;
|
|
||||||
(tail, name, PhKind::Scalar)
|
|
||||||
} else {
|
|
||||||
async fn name_and_prio<'a>(
|
|
||||||
tail: &'a str,
|
|
||||||
ctx: &'a LexContext<'a>,
|
|
||||||
) -> OrcRes<(&'a str, u8, &'a str)> {
|
|
||||||
let name = tail.split_once(|c| !name_char(c)).map_or("", |(h, _)| h);
|
|
||||||
let tail = tail.split_at(name.len()).1;
|
|
||||||
let (prio, tail) = match tail.strip_prefix(":") {
|
|
||||||
None => (0, tail),
|
|
||||||
Some(tail) => {
|
|
||||||
let prio = tail.split_once(|c: char| c.is_ascii_digit()).map_or("", |(h, _)| h);
|
|
||||||
let tail = tail.split_at(prio.len()).1;
|
|
||||||
if let Ok(prio_num) = prio.parse::<u8>() {
|
|
||||||
(prio_num, tail)
|
|
||||||
} else {
|
|
||||||
return Err(mk_errv(
|
|
||||||
i().i("Invalid priority, must be 0-255").await,
|
|
||||||
format!("{prio} is not a valid placeholder priority"),
|
|
||||||
[ctx.pos_lt(prio.len(), tail)],
|
|
||||||
));
|
|
||||||
}
|
|
||||||
},
|
|
||||||
};
|
|
||||||
Ok((name, prio, tail))
|
|
||||||
}
|
|
||||||
if let Some(tail) = line.strip_prefix("..$") {
|
|
||||||
let (name, priority, tail) = name_and_prio(tail, ctx).await?;
|
|
||||||
(tail, name, PhKind::Vector { at_least_one: false, priority })
|
|
||||||
} else if let Some(tail) = line.strip_prefix("...$") {
|
|
||||||
let (name, priority, tail) = name_and_prio(tail, ctx).await?;
|
|
||||||
(tail, name, PhKind::Vector { at_least_one: true, priority })
|
|
||||||
} else {
|
|
||||||
return Err(err_not_applicable().await);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let ph_atom = PhAtom(i().i::<String>(name).await.to_api(), phkind);
|
|
||||||
Ok((tail, x_tok(ph_atom).await.at(ctx.pos_tt(line, tail))))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
59
orchid-std/src/macros/recur_state.rs
Normal file
59
orchid-std/src/macros/recur_state.rs
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
use std::borrow::Cow;
|
||||||
|
use std::fmt;
|
||||||
|
use std::rc::Rc;
|
||||||
|
|
||||||
|
use never::Never;
|
||||||
|
use orchid_base::interner::Tok;
|
||||||
|
use orchid_base::name::Sym;
|
||||||
|
use orchid_extension::atom::Atomic;
|
||||||
|
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||||
|
pub struct RulePath {
|
||||||
|
pub module: Sym,
|
||||||
|
pub main_kw: IStr,
|
||||||
|
pub rule: u32,
|
||||||
|
}
|
||||||
|
impl RulePath {
|
||||||
|
pub fn name(&self) -> String { format!("rule::{}::{}", self.main_kw, self.rule) }
|
||||||
|
}
|
||||||
|
impl fmt::Display for RulePath {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(f, "Rule {}::({})::{}", self.module, self.main_kw, self.rule)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub enum RecurState {
|
||||||
|
Bottom,
|
||||||
|
Recursive { path: RulePath, prev: Rc<RecurState> },
|
||||||
|
}
|
||||||
|
impl RecurState {
|
||||||
|
pub fn base(path: RulePath) -> Self {
|
||||||
|
RecurState::Recursive { path, prev: Rc::new(RecurState::Bottom) }
|
||||||
|
}
|
||||||
|
pub fn push(&self, new: RulePath) -> Option<Self> {
|
||||||
|
let mut cur = self;
|
||||||
|
while let Self::Recursive { path, prev } = cur {
|
||||||
|
if &new == path {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
cur = prev;
|
||||||
|
}
|
||||||
|
Some(Self::Recursive { path: new, prev: Rc::new(self.clone()) })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Atomic for RecurState {
|
||||||
|
type Data = Option<()>;
|
||||||
|
type Variant = OwnedVariant;
|
||||||
|
}
|
||||||
|
impl OwnedAtom for RecurState {
|
||||||
|
type Refs = Never;
|
||||||
|
|
||||||
|
async fn val(&self) -> Cow<'_, Self::Data> {
|
||||||
|
Cow::Owned(match self {
|
||||||
|
Self::Bottom => None,
|
||||||
|
Self::Recursive { .. } => Some(()),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,275 +1,110 @@
|
|||||||
use std::ops::{Add, Range};
|
use futures::FutureExt;
|
||||||
|
use hashbrown::HashMap;
|
||||||
use async_fn_stream::stream;
|
|
||||||
use futures::{FutureExt, StreamExt};
|
|
||||||
use hashbrown::{HashMap, HashSet};
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::mk_errv;
|
use orchid_base::error::mk_err;
|
||||||
use orchid_base::format::fmt;
|
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
|
use orchid_base::sym;
|
||||||
use orchid_base::tree::Paren;
|
use orchid_base::tree::Paren;
|
||||||
use orchid_extension::atom::TAtom;
|
|
||||||
use orchid_extension::atom_owned::own;
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::conv::ToExpr;
|
use orchid_extension::conv::ToExpr;
|
||||||
use orchid_extension::coroutine_exec::{ExecHandle, exec};
|
use orchid_extension::coroutine_exec::ExecHandle;
|
||||||
use orchid_extension::gen_expr::{GExpr, bot, call, lambda, sym_ref};
|
use orchid_extension::gen_expr::{GExpr, bot, call, sym_ref};
|
||||||
use orchid_extension::reflection::{ReflMemKind, refl};
|
use orchid_extension::system::SysCtx;
|
||||||
use subslice_offset::SubsliceOffset;
|
|
||||||
use substack::Substack;
|
|
||||||
|
|
||||||
use crate::macros::macro_value::{Macro, Rule};
|
use crate::macros::macro_line::{Macro, Rule};
|
||||||
use crate::macros::mactree::MacTreeSeq;
|
use crate::macros::recur_state::{RecurState, RulePath};
|
||||||
|
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
|
||||||
use crate::macros::rule::state::{MatchState, StateEntry};
|
use crate::macros::rule::state::{MatchState, StateEntry};
|
||||||
use crate::{MacTok, MacTree};
|
use crate::{MacTok, MacTree};
|
||||||
|
|
||||||
pub async fn resolve(tpl: MacTree) -> GExpr {
|
pub struct ResolveCtx<'a> {
|
||||||
exec(async move |mut h| {
|
pub ctx: SysCtx,
|
||||||
let root = refl();
|
pub recur: RecurState,
|
||||||
let mut macros = HashMap::new();
|
|
||||||
for n in tpl.glossary() {
|
|
||||||
if let Ok(ReflMemKind::Const) = root.get_by_path(n).await.map(|m| m.kind()) {
|
|
||||||
let Ok(mac) = h.exec::<TAtom<Macro>>(sym_ref(n.clone())).await else { continue };
|
|
||||||
let mac = own(&mac).await;
|
|
||||||
macros.entry(mac.canonical_name().await).or_insert(mac);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let mut exclusive = Vec::new();
|
|
||||||
let mut prios = Vec::<u64>::new();
|
|
||||||
let mut priod = Vec::<FilteredMacroRecord>::new();
|
|
||||||
for (_, mac) in macros.iter() {
|
|
||||||
let mut record = FilteredMacroRecord { mac, rules: Vec::new() };
|
|
||||||
for (rule_i, rule) in mac.0.rules.iter().enumerate() {
|
|
||||||
if rule.pattern.glossary.is_subset(tpl.glossary()) {
|
|
||||||
record.rules.push(rule_i);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !record.rules.is_empty() {
|
|
||||||
match mac.0.prio {
|
|
||||||
None => exclusive.push(record),
|
|
||||||
Some(prio) => {
|
|
||||||
let i = prios.partition_point(|p| *p > prio);
|
|
||||||
prios.insert(i, prio);
|
|
||||||
priod.insert(i, record);
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let mut rctx = ResolveCtx { h, exclusive, priod };
|
|
||||||
resolve_one(&mut rctx, Substack::Bottom, &tpl).await
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Rules belonging to one macro that passed a particular filter
|
|
||||||
pub struct FilteredMacroRecord<'a> {
|
|
||||||
mac: &'a Macro,
|
|
||||||
/// The rules in increasing order of index
|
|
||||||
rules: Vec<usize>,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct ResolveCtx<'a> {
|
|
||||||
pub h: ExecHandle<'a>,
|
pub h: ExecHandle<'a>,
|
||||||
/// If these overlap, that's a compile-time error
|
pub named: HashMap<Sym, Vec<(&'a NamedMatcher, &'a Macro, &'a Rule)>>,
|
||||||
pub exclusive: Vec<FilteredMacroRecord<'a>>,
|
pub priod: Vec<(&'a PriodMatcher, &'a Macro, &'a Rule)>,
|
||||||
/// If these overlap, the priorities decide the order. In case of a tie, the
|
|
||||||
/// order is unspecified
|
|
||||||
pub priod: Vec<FilteredMacroRecord<'a>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn resolve_one(
|
pub async fn resolve(ctx: &mut ResolveCtx<'_>, value: &MacTree) -> Option<MacTree> {
|
||||||
ctx: &mut ResolveCtx<'_>,
|
|
||||||
arg_stk: Substack<'_, Sym>,
|
|
||||||
value: &MacTree,
|
|
||||||
) -> GExpr {
|
|
||||||
match value.tok() {
|
match value.tok() {
|
||||||
MacTok::Ph(_) | MacTok::Slot => panic!("Forbidden element in value mactree"),
|
MacTok::Ph(_) | MacTok::Slot => panic!("Forbidden element in value mactree"),
|
||||||
MacTok::Bottom(err) => bot(err.clone()),
|
MacTok::Bottom(_) | MacTok::Value(_) | MacTok::Name(_) => None,
|
||||||
MacTok::Value(v) => v.clone().to_gen().await,
|
MacTok::Lambda(arg, body) =>
|
||||||
MacTok::Name(n) => sym_ref(n.clone()),
|
Some(MacTok::Lambda(arg.clone(), resolve_seq(ctx, body).await?).at(value.pos())),
|
||||||
MacTok::Lambda(arg, body) => {
|
MacTok::S(ptyp, body) => Some(MacTok::S(*ptyp, resolve_seq(ctx, body).await?).at(value.pos())),
|
||||||
let MacTok::Name(name) = &*arg.tok else {
|
|
||||||
return bot(mk_errv(
|
|
||||||
i().i("Syntax error after macros").await,
|
|
||||||
"This token ends up as a binding, consider replacing it with a name",
|
|
||||||
[arg.pos()],
|
|
||||||
));
|
|
||||||
};
|
|
||||||
let arg_pos = arg_stk.len() as u64;
|
|
||||||
let arg_stk = arg_stk.push(name.clone());
|
|
||||||
lambda(arg_pos, [resolve_seq(ctx, arg_stk, body.clone(), value.pos()).await])
|
|
||||||
},
|
|
||||||
MacTok::S(Paren::Round, body) => resolve_seq(ctx, arg_stk, body.clone(), value.pos()).await,
|
|
||||||
MacTok::S(..) => bot(mk_errv(
|
|
||||||
i().i("Leftover [] or {} not matched by macro").await,
|
|
||||||
format!("{} was not matched by any macro", fmt(value, &i()).await),
|
|
||||||
[value.pos()],
|
|
||||||
)),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type XMatches<'a> = Vec<(Range<usize>, &'a Macro, &'a Rule, MatchState<'a>)>;
|
pub async fn resolve_seq(ctx: &mut ResolveCtx<'_>, val: &[MacTree]) -> Option<Vec<MacTree>> {
|
||||||
|
let mut any_changed = false;
|
||||||
/// find the subsection of the slice that satisfies both the lower and upper
|
let mut i = 0;
|
||||||
/// limit.
|
let mut val = val.to_vec();
|
||||||
fn subsection<T>(
|
'all_named: while i < val.len() {
|
||||||
slice: &[T],
|
'one_named: {
|
||||||
lower_limit: impl FnMut(&T) -> bool,
|
let MacTok::Name(key) = val[i].tok() else { break 'one_named };
|
||||||
mut upper_limit: impl FnMut(&T) -> bool,
|
let Some(options) = ctx.named.get(key) else { break 'one_named };
|
||||||
) -> Range<usize> {
|
let matches = (options.iter())
|
||||||
let start = slice.partition_point(lower_limit);
|
.filter_map(|r| Some((r.1, r.2, r.0.apply(&val[i..], |_| false)?)))
|
||||||
let len = slice[start..].partition_point(|t| !upper_limit(t));
|
.collect_vec();
|
||||||
start..start + len
|
match matches.len() {
|
||||||
}
|
0 => break 'one_named,
|
||||||
|
1 => {
|
||||||
async fn resolve_seq(
|
any_changed = true;
|
||||||
ctx: &mut ResolveCtx<'_>,
|
let (mac, rule, (state, tail)) = matches.into_iter().exactly_one().unwrap();
|
||||||
arg_stk: Substack<'_, Sym>,
|
let end = val.len() - tail.len();
|
||||||
val: MacTreeSeq,
|
let body_call = mk_body_call(mac, rule, &state, &ctx.ctx, ctx.recur.clone()).await;
|
||||||
fallback_pos: Pos,
|
|
||||||
) -> GExpr {
|
|
||||||
if val.items.is_empty() {
|
|
||||||
return bot(mk_errv(
|
|
||||||
i().i("Empty sequence").await,
|
|
||||||
"() or (\\arg ) left after macro execution. \
|
|
||||||
This is usually caused by an incomplete call to a macro with bad error detection",
|
|
||||||
[fallback_pos],
|
|
||||||
));
|
|
||||||
}
|
|
||||||
// A sorted collection of overlapping but non-nested matches to exclusive
|
|
||||||
// macros
|
|
||||||
let mut x_matches: XMatches = Vec::new();
|
|
||||||
let top_glossary = val.top_glossary.clone();
|
|
||||||
let mut new_val = val.items.to_vec();
|
|
||||||
'x_macros: for x in &ctx.exclusive {
|
|
||||||
let mut rules_iter = x.rules.iter();
|
|
||||||
let ((before, state, after), rule) = 'rules: loop {
|
|
||||||
let Some(ridx) = rules_iter.next() else { continue 'x_macros };
|
|
||||||
let rule = &x.mac.0.rules[*ridx];
|
|
||||||
if rule.pattern.top_glossary.is_subset(&top_glossary)
|
|
||||||
&& let Some(record) = rule.matcher.apply(&val.items[..], &|_| true).await
|
|
||||||
{
|
|
||||||
break 'rules (record, rule);
|
|
||||||
};
|
|
||||||
};
|
|
||||||
let new_r = (before.len()..new_val.len() - after.len(), x.mac, rule, state);
|
|
||||||
// elements that overlap with us
|
|
||||||
let overlap =
|
|
||||||
subsection(&x_matches[..], |r| new_r.0.start < r.0.end, |r| r.0.start < new_r.0.end);
|
|
||||||
let overlapping = &x_matches[overlap.clone()];
|
|
||||||
// elements that fully contain us
|
|
||||||
let geq_range =
|
|
||||||
subsection(overlapping, |r| r.0.start <= new_r.0.start, |r| new_r.0.end <= r.0.end);
|
|
||||||
let geq = &overlapping[geq_range.clone()];
|
|
||||||
// if any of these is equal to us, all of them must be, otherwise the larger
|
|
||||||
// ranges would have overridden the smaller ones
|
|
||||||
if let Some(example) = geq.first() {
|
|
||||||
// if they are equal to us, record the conflict.
|
|
||||||
if example.0 == new_r.0 {
|
|
||||||
let idx = (x_matches.subslice_offset(geq))
|
|
||||||
.expect("this slice is statically derived from x_matches");
|
|
||||||
x_matches.insert(idx, new_r);
|
|
||||||
}
|
|
||||||
// either way, we matched so no further rules can run.
|
|
||||||
continue 'x_macros;
|
|
||||||
}
|
|
||||||
// elements we fully contain. Equal ranges have been handled above
|
|
||||||
let lt_range =
|
|
||||||
subsection(overlapping, |r| new_r.0.start <= r.0.start, |r| r.0.end <= new_r.0.end);
|
|
||||||
let lt = &overlapping[lt_range.clone()];
|
|
||||||
if lt.is_empty() {
|
|
||||||
// an empty range
|
|
||||||
let i = x_matches.partition_point(|r| r.0.start < new_r.0.start);
|
|
||||||
x_matches.insert(i, new_r);
|
|
||||||
} else {
|
|
||||||
let lt_start =
|
|
||||||
x_matches.subslice_offset(overlapping).expect("Slice statically derived from x_matches");
|
|
||||||
x_matches.splice(lt_start..lt_start + lt_range.len(), [new_r]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// apply exclusive matches
|
|
||||||
if !x_matches.is_empty() {
|
|
||||||
// ranges of indices into x_matches which setwise conflict with each other.
|
|
||||||
// Pairwise conflict reporting is excess noise, but a single conflict error
|
|
||||||
// doesn't reveal where within the parenthesized block to look, so it's easiest
|
|
||||||
// to group them setwise even if these sets may associate macros which don't
|
|
||||||
// directly conflict.
|
|
||||||
let conflict_sets = (0..x_matches.len()).map(|x| x..x + 1).coalesce(|lran, rran| {
|
|
||||||
// each index was mapped to a range that contains only itself. Now we check if
|
|
||||||
// the last match in the first range overlaps the first match in the second
|
|
||||||
// range, and combine them if this is the case.
|
|
||||||
if x_matches[rran.start].0.start < x_matches[lran.end].0.end {
|
|
||||||
Ok(lran.start..rran.end)
|
|
||||||
} else {
|
|
||||||
Err((lran, rran))
|
|
||||||
}
|
|
||||||
});
|
|
||||||
let mac_conflict_tk = i().i("Macro conflict").await;
|
|
||||||
let error = conflict_sets
|
|
||||||
.filter(|r| 1 < r.len())
|
|
||||||
.map(|set| {
|
|
||||||
mk_errv(
|
|
||||||
mac_conflict_tk.clone(),
|
|
||||||
"Multiple partially overlapping syntax elements detected. \n\
|
|
||||||
Try parenthesizing whichever side is supposed to be the subexpression.",
|
|
||||||
x_matches[set].iter().flat_map(|rec| rec.3.names()).flat_map(|name| name.1).cloned(),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.reduce(|l, r| l + r);
|
|
||||||
if let Some(error) = error {
|
|
||||||
return bot(error);
|
|
||||||
}
|
|
||||||
// no conflicts, apply all exclusive matches
|
|
||||||
for (range, mac, rule, state) in x_matches.into_iter().rev() {
|
|
||||||
// backwards so that the non-overlapping ranges remain valid
|
|
||||||
let pos = (state.names().flat_map(|r| r.1).cloned().reduce(Pos::add))
|
|
||||||
.expect("All macro rules must contain at least one locally defined name");
|
|
||||||
let subex = ctx.h.register(mk_body_call(mac, rule, &state, pos.clone()).await).await;
|
|
||||||
new_val.splice(range, [MacTok::Value(subex).at(pos)]);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
// Does this glossary refresh actually pay off?
|
|
||||||
let top_glossary = (new_val.iter())
|
|
||||||
.flat_map(|t| if let MacTok::Name(t) = t.tok() { Some(t.clone()) } else { None })
|
|
||||||
.collect::<HashSet<_>>();
|
|
||||||
for FilteredMacroRecord { mac, rules } in &ctx.priod {
|
|
||||||
for ridx in rules {
|
|
||||||
let rule = &mac.0.rules[*ridx];
|
|
||||||
if !rule.pattern.top_glossary.is_subset(&top_glossary) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
let Some((pre, state, suf)) = rule.matcher.apply(&new_val, &|_| true).await else { continue };
|
|
||||||
let range = pre.len()..new_val.len() - suf.len();
|
|
||||||
let pos = (state.names().flat_map(|pair| pair.1).cloned().reduce(Pos::add))
|
|
||||||
.expect("All macro rules must contain at least one locally defined name");
|
|
||||||
let subex = ctx.h.register(mk_body_call(mac, rule, &state, pos.clone()).await).await;
|
|
||||||
std::mem::drop(state);
|
std::mem::drop(state);
|
||||||
new_val.splice(range, [MacTok::Value(subex).at(pos)]);
|
val.splice(i..end, [MacTok::Value(ctx.h.register(body_call).await).at(Pos::None)]);
|
||||||
|
i = end;
|
||||||
|
},
|
||||||
|
2.. => todo!("Named macros conflict!"),
|
||||||
}
|
}
|
||||||
|
continue 'all_named;
|
||||||
}
|
}
|
||||||
let exprs = stream(async |mut h| {
|
i += 1;
|
||||||
for mt in new_val {
|
|
||||||
h.emit(resolve_one(ctx, arg_stk.clone(), &mt).await).await
|
|
||||||
}
|
}
|
||||||
})
|
for (matcher, mac, rule) in &ctx.priod {
|
||||||
.collect::<Vec<_>>()
|
let Some(state) = matcher.apply(&val, |_| false) else { continue };
|
||||||
.boxed_local()
|
return Some(vec![
|
||||||
.await;
|
MacTok::Value(
|
||||||
exprs.into_iter().reduce(|f, x| call(f, [x])).expect(
|
ctx.h.register(mk_body_call(mac, rule, &state, &ctx.ctx, ctx.recur.clone()).await).await,
|
||||||
"We checked first that it isn't empty, and named macros get replaced with their results",
|
|
||||||
)
|
)
|
||||||
|
.at(Pos::None),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
for expr in val.iter_mut() {
|
||||||
|
if let Some(new) = resolve(ctx, expr).boxed_local().await {
|
||||||
|
*expr = new;
|
||||||
|
any_changed = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if any_changed { Some(val) } else { None }
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn mk_body_call(mac: &Macro, rule: &Rule, state: &MatchState<'_>, pos: Pos) -> GExpr {
|
async fn mk_body_call(
|
||||||
let mut call_args = vec![];
|
mac: &Macro,
|
||||||
|
rule: &Rule,
|
||||||
|
state: &MatchState<'_>,
|
||||||
|
ctx: &SysCtx,
|
||||||
|
recur: RecurState,
|
||||||
|
) -> GExpr {
|
||||||
|
let rule_path =
|
||||||
|
RulePath { module: mac.0.module.clone(), main_kw: mac.0.own_kws[0].clone(), rule: rule.index };
|
||||||
|
let Some(new_recur) = recur.push(rule_path.clone()) else {
|
||||||
|
return bot(mk_err(
|
||||||
|
ctx.i().i("Circular macro dependency").await,
|
||||||
|
format!("The definition of {rule_path} is circular"),
|
||||||
|
[rule.pos.clone()],
|
||||||
|
));
|
||||||
|
};
|
||||||
|
let mut call_args = vec![sym_ref(mac.0.module.suffix([rule.body_name.clone()], ctx.i()).await)];
|
||||||
for name in rule.placeholders.iter() {
|
for name in rule.placeholders.iter() {
|
||||||
call_args.push(match state.get(name).expect("Missing state entry for placeholder") {
|
call_args.push(match state.get(name).expect("Missing state entry for placeholder") {
|
||||||
StateEntry::Scalar(scal) => (**scal).clone().to_gen().await,
|
StateEntry::Scalar(scal) => (**scal).clone().to_expr().await,
|
||||||
StateEntry::Vec(vec) =>
|
StateEntry::Vec(vec) => MacTok::S(Paren::Round, vec.to_vec()).at(Pos::None).to_expr().await,
|
||||||
MacTok::S(Paren::Round, MacTreeSeq::new(vec.iter().cloned())).at(Pos::None).to_gen().await,
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
call(sym_ref(mac.0.module.suffix([rule.body_name.clone()], &i()).await), call_args)
|
call_args
|
||||||
.at(pos.clone())
|
.push(call([sym_ref(sym!(macros::resolve_recur; ctx.i()).await), new_recur.to_expr().await]));
|
||||||
|
call(call_args)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,18 +1,17 @@
|
|||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use futures::future::join_all;
|
use futures::future::join_all;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::{OrcRes, mk_errv};
|
use orchid_base::error::{OrcRes, mk_err};
|
||||||
use orchid_base::interner::Tok;
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::join_ok;
|
use orchid_base::join_ok;
|
||||||
use orchid_base::side::Side;
|
use orchid_base::side::Side;
|
||||||
use orchid_extension::context::i;
|
|
||||||
|
|
||||||
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
|
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
|
||||||
use super::vec_attrs::vec_attrs;
|
use super::vec_attrs::vec_attrs;
|
||||||
use crate::macros::mactree::{Ph, PhKind};
|
use crate::macros::mactree::{Ph, PhKind};
|
||||||
use crate::macros::{MacTok, MacTree};
|
use crate::macros::{MacTok, MacTree};
|
||||||
|
|
||||||
pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree]);
|
pub type MaxVecSplit<'a> = (&'a [MacTree], (IStr, u8, bool), &'a [MacTree]);
|
||||||
|
|
||||||
/// Derive the details of the central vectorial and the two sides from a
|
/// Derive the details of the central vectorial and the two sides from a
|
||||||
/// slice of Expr's
|
/// slice of Expr's
|
||||||
@@ -32,29 +31,29 @@ fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
|
|||||||
iter.take_while(|expr| vec_attrs(expr).is_none()).count()
|
iter.take_while(|expr| vec_attrs(expr).is_none()).count()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn mk_any(pattern: &[MacTree]) -> OrcRes<AnyMatcher> {
|
pub async fn mk_any(pattern: &[MacTree], i: &Interner) -> OrcRes<AnyMatcher> {
|
||||||
let left_split = scal_cnt(pattern.iter());
|
let left_split = scal_cnt(pattern.iter());
|
||||||
if pattern.len() <= left_split {
|
if pattern.len() <= left_split {
|
||||||
return Ok(AnyMatcher::Scalar(mk_scalv(pattern).await?));
|
return Ok(AnyMatcher::Scalar(mk_scalv(pattern, i).await?));
|
||||||
}
|
}
|
||||||
let (left, not_left) = pattern.split_at(left_split);
|
let (left, not_left) = pattern.split_at(left_split);
|
||||||
let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
|
let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
|
||||||
let (mid, right) = not_left.split_at(right_split);
|
let (mid, right) = not_left.split_at(right_split);
|
||||||
join_ok! {
|
join_ok! {
|
||||||
left = mk_scalv(left).await;
|
left = mk_scalv(left, i).await;
|
||||||
mid = mk_vec(mid).await;
|
mid = mk_vec(mid, i).await;
|
||||||
right = mk_scalv(right).await;
|
right = mk_scalv(right, i).await;
|
||||||
}
|
}
|
||||||
Ok(AnyMatcher::Vec { left, mid, right })
|
Ok(AnyMatcher::Vec { left, mid, right })
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Pattern MUST NOT contain vectorial placeholders
|
/// Pattern MUST NOT contain vectorial placeholders
|
||||||
async fn mk_scalv(pattern: &[MacTree]) -> OrcRes<Vec<ScalMatcher>> {
|
async fn mk_scalv(pattern: &[MacTree], i: &Interner) -> OrcRes<Vec<ScalMatcher>> {
|
||||||
join_all(pattern.iter().map(mk_scalar)).await.into_iter().collect()
|
join_all(pattern.iter().map(|pat| mk_scalar(pat, i))).await.into_iter().collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Pattern MUST start and end with a vectorial placeholder
|
/// Pattern MUST start and end with a vectorial placeholder
|
||||||
pub async fn mk_vec(pattern: &[MacTree]) -> OrcRes<VecMatcher> {
|
pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
|
||||||
debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
|
debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
|
||||||
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
|
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
|
||||||
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
|
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
|
||||||
@@ -69,8 +68,8 @@ pub async fn mk_vec(pattern: &[MacTree]) -> OrcRes<VecMatcher> {
|
|||||||
(&[], &[]) => Ok(VecMatcher::Placeh { key, nonzero }),
|
(&[], &[]) => Ok(VecMatcher::Placeh { key, nonzero }),
|
||||||
(&[], _) => {
|
(&[], _) => {
|
||||||
join_ok! {
|
join_ok! {
|
||||||
sep = mk_scalv(r_sep).await;
|
sep = mk_scalv(r_sep, i).await;
|
||||||
right = mk_vec(r_side).boxed_local().await;
|
right = mk_vec(r_side, i).boxed_local().await;
|
||||||
}
|
}
|
||||||
Ok(VecMatcher::Scan {
|
Ok(VecMatcher::Scan {
|
||||||
direction: Side::Left,
|
direction: Side::Left,
|
||||||
@@ -81,8 +80,8 @@ pub async fn mk_vec(pattern: &[MacTree]) -> OrcRes<VecMatcher> {
|
|||||||
},
|
},
|
||||||
(_, &[]) => {
|
(_, &[]) => {
|
||||||
join_ok! {
|
join_ok! {
|
||||||
left = mk_vec(l_side).boxed_local().await;
|
left = mk_vec(l_side, i).boxed_local().await;
|
||||||
sep = mk_scalv(l_sep).await;
|
sep = mk_scalv(l_sep, i).await;
|
||||||
}
|
}
|
||||||
Ok(VecMatcher::Scan {
|
Ok(VecMatcher::Scan {
|
||||||
direction: Side::Right,
|
direction: Side::Right,
|
||||||
@@ -96,10 +95,10 @@ pub async fn mk_vec(pattern: &[MacTree]) -> OrcRes<VecMatcher> {
|
|||||||
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
|
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
|
||||||
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
|
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
|
||||||
join_ok! {
|
join_ok! {
|
||||||
left = mk_vec(l_side).boxed_local().await;
|
left = mk_vec(l_side, i).boxed_local().await;
|
||||||
left_sep = mk_scalv(l_sep).await;
|
left_sep = mk_scalv(l_sep, i).await;
|
||||||
right_sep = mk_scalv(r_sep).await;
|
right_sep = mk_scalv(r_sep, i).await;
|
||||||
right = mk_vec(r_side).boxed_local().await;
|
right = mk_vec(r_side, i).boxed_local().await;
|
||||||
}
|
}
|
||||||
Ok(VecMatcher::Middle {
|
Ok(VecMatcher::Middle {
|
||||||
left: Box::new(left),
|
left: Box::new(left),
|
||||||
@@ -114,7 +113,7 @@ pub async fn mk_vec(pattern: &[MacTree]) -> OrcRes<VecMatcher> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Pattern MUST NOT be a vectorial placeholder
|
/// Pattern MUST NOT be a vectorial placeholder
|
||||||
async fn mk_scalar(pattern: &MacTree) -> OrcRes<ScalMatcher> {
|
async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> {
|
||||||
Ok(match &*pattern.tok {
|
Ok(match &*pattern.tok {
|
||||||
MacTok::Name(n) => ScalMatcher::Name(n.clone()),
|
MacTok::Name(n) => ScalMatcher::Name(n.clone()),
|
||||||
MacTok::Ph(Ph { name, kind }) => match kind {
|
MacTok::Ph(Ph { name, kind }) => match kind {
|
||||||
@@ -123,10 +122,10 @@ async fn mk_scalar(pattern: &MacTree) -> OrcRes<ScalMatcher> {
|
|||||||
},
|
},
|
||||||
PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
|
PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
|
||||||
},
|
},
|
||||||
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(&body.items).boxed_local().await?)),
|
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body, i).boxed_local().await?)),
|
||||||
MacTok::Lambda(..) =>
|
MacTok::Lambda(..) =>
|
||||||
return Err(mk_errv(
|
return Err(mk_err(
|
||||||
i().i("Lambda in matcher").await,
|
i.i("Lambda in matcher").await,
|
||||||
"Lambdas can't be matched for, only generated in templates",
|
"Lambdas can't be matched for, only generated in templates",
|
||||||
[pattern.pos()],
|
[pattern.pos()],
|
||||||
)),
|
)),
|
||||||
@@ -137,52 +136,50 @@ async fn mk_scalar(pattern: &MacTree) -> OrcRes<ScalMatcher> {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod test {
|
mod test {
|
||||||
|
use orchid_base::interner::Interner;
|
||||||
use orchid_base::location::SrcRange;
|
use orchid_base::location::SrcRange;
|
||||||
use orchid_base::sym;
|
use orchid_base::sym;
|
||||||
use orchid_base::tokens::Paren;
|
use orchid_base::tokens::Paren;
|
||||||
use orchid_extension::context::{i, mock_ctx, with_ctx};
|
|
||||||
use test_executors::spin_on;
|
use test_executors::spin_on;
|
||||||
|
|
||||||
use super::mk_any;
|
use super::mk_any;
|
||||||
use crate::macros::MacTok;
|
use crate::macros::MacTok;
|
||||||
use crate::macros::mactree::{MacTreeSeq, Ph, PhKind};
|
use crate::macros::mactree::{Ph, PhKind};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_scan() {
|
fn test_scan() {
|
||||||
spin_on(with_ctx(mock_ctx(), async {
|
spin_on(async {
|
||||||
let ex = |tok: MacTok| async { tok.at(SrcRange::mock(&i()).await.pos()) };
|
let i = Interner::new_master();
|
||||||
|
let ex = |tok: MacTok| async { tok.at(SrcRange::mock(&i).await.pos()) };
|
||||||
let pattern = vec![
|
let pattern = vec![
|
||||||
ex(MacTok::Ph(Ph {
|
ex(MacTok::Ph(Ph {
|
||||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||||
name: i().i("::prefix").await,
|
name: i.i("::prefix").await,
|
||||||
}))
|
}))
|
||||||
.await,
|
.await,
|
||||||
ex(MacTok::Name(sym!(prelude::do; i()))).await,
|
ex(MacTok::Name(sym!(prelude::do; i).await)).await,
|
||||||
ex(MacTok::S(
|
ex(MacTok::S(Paren::Round, vec![
|
||||||
Paren::Round,
|
|
||||||
MacTreeSeq::new([
|
|
||||||
ex(MacTok::Ph(Ph {
|
ex(MacTok::Ph(Ph {
|
||||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||||
name: i().i("expr").await,
|
name: i.i("expr").await,
|
||||||
}))
|
}))
|
||||||
.await,
|
.await,
|
||||||
ex(MacTok::Name(sym!(prelude::; ; i()))).await,
|
ex(MacTok::Name(sym!(prelude::; ; i).await)).await,
|
||||||
ex(MacTok::Ph(Ph {
|
ex(MacTok::Ph(Ph {
|
||||||
kind: PhKind::Vector { priority: 1, at_least_one: false },
|
kind: PhKind::Vector { priority: 1, at_least_one: false },
|
||||||
name: i().i("rest").await,
|
name: i.i("rest").await,
|
||||||
}))
|
}))
|
||||||
.await,
|
.await,
|
||||||
]),
|
]))
|
||||||
))
|
|
||||||
.await,
|
.await,
|
||||||
ex(MacTok::Ph(Ph {
|
ex(MacTok::Ph(Ph {
|
||||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||||
name: i().i("::suffix").await,
|
name: i.i("::suffix").await,
|
||||||
}))
|
}))
|
||||||
.await,
|
.await,
|
||||||
];
|
];
|
||||||
let matcher = mk_any(&pattern).await.expect("This matcher isn't broken");
|
let matcher = mk_any(&pattern, &i).await.expect("This matcher isn't broken");
|
||||||
println!("{matcher}");
|
println!("{matcher}");
|
||||||
}))
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,61 +1,87 @@
|
|||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
|
use itertools::Itertools;
|
||||||
use orchid_base::error::OrcRes;
|
use orchid_base::error::OrcRes;
|
||||||
|
use orchid_base::interner::{Interner, Tok};
|
||||||
|
use orchid_base::location::Pos;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_extension::context::i;
|
|
||||||
|
|
||||||
use super::any_match::any_match;
|
use super::any_match::any_match;
|
||||||
use super::build::mk_any;
|
use super::build::{mk_any, mk_vec};
|
||||||
use super::shared::AnyMatcher;
|
use super::shared::{AnyMatcher, VecMatcher};
|
||||||
use super::state::{MatchState, StateEntry};
|
use super::state::{MatchState, StateEntry};
|
||||||
use super::vec_attrs::vec_attrs;
|
use super::vec_attrs::vec_attrs;
|
||||||
use crate::macros::mactree::{MacTreeSeq, Ph, PhKind};
|
use super::vec_match::vec_match;
|
||||||
|
use crate::macros::mactree::{Ph, PhKind};
|
||||||
use crate::macros::{MacTok, MacTree};
|
use crate::macros::{MacTok, MacTree};
|
||||||
|
|
||||||
pub struct Matcher {
|
pub struct NamedMatcher {
|
||||||
inner: AnyMatcher,
|
inner: AnyMatcher,
|
||||||
|
head: Sym,
|
||||||
|
after_tok: IStr,
|
||||||
}
|
}
|
||||||
|
impl NamedMatcher {
|
||||||
impl Matcher {
|
pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
|
||||||
pub async fn new(pattern: MacTreeSeq) -> OrcRes<Self> {
|
let head = match pattern.first().map(|tree| tree.tok()) {
|
||||||
let mut pattern = Rc::unwrap_or_clone(pattern.items);
|
Some(MacTok::Name(name)) => name.clone(),
|
||||||
let kind = PhKind::Vector { at_least_one: false, priority: 0 };
|
_ => panic!("Named matchers must begin with a name"),
|
||||||
let first = pattern.first().expect("Empty pattern is not allowed");
|
};
|
||||||
if vec_attrs(first).is_none() {
|
let after_tok = i.i("::after").await;
|
||||||
let pos = first.pos();
|
let inner = match pattern.last().and_then(vec_attrs).is_some() {
|
||||||
pattern.insert(0, MacTok::Ph(Ph { name: i().i("::before").await, kind }).at(pos));
|
true => mk_any(pattern, i).await?,
|
||||||
|
false => {
|
||||||
|
let kind = PhKind::Vector { priority: 0, at_least_one: false };
|
||||||
|
let suffix = [MacTok::Ph(Ph { name: after_tok.clone(), kind }).at(Pos::None)];
|
||||||
|
mk_any(&pattern.iter().cloned().chain(suffix).collect_vec(), i).await?
|
||||||
|
},
|
||||||
|
};
|
||||||
|
Ok(Self { after_tok, inner, head })
|
||||||
}
|
}
|
||||||
let last = pattern.last().expect("first returned Some above");
|
pub fn head(&self) -> Sym { self.head.clone() }
|
||||||
if vec_attrs(last).is_none() {
|
/// Also returns the tail, if any, which should be matched further
|
||||||
let pos = last.pos();
|
/// Note that due to how priod works below, the main usable information from
|
||||||
pattern.insert(0, MacTok::Ph(Ph { name: i().i("::after").await, kind }).at(pos));
|
/// the tail is its length
|
||||||
}
|
pub fn apply<'a>(
|
||||||
Ok(Matcher { inner: mk_any(&pattern).await? })
|
|
||||||
}
|
|
||||||
/// Also returns the head and tail, which should be matched by overarching
|
|
||||||
/// matchers attempted later.
|
|
||||||
pub async fn apply<'a>(
|
|
||||||
&self,
|
&self,
|
||||||
seq: &'a [MacTree],
|
seq: &'a [MacTree],
|
||||||
save_loc: &dyn Fn(Sym) -> bool,
|
save_loc: impl Fn(Sym) -> bool,
|
||||||
) -> Option<(&'a [MacTree], MatchState<'a>, &'a [MacTree])> {
|
) -> Option<(MatchState<'a>, &'a [MacTree])> {
|
||||||
let mut result = any_match(&self.inner, seq, &save_loc)?;
|
let mut state = any_match(&self.inner, seq, &save_loc)?;
|
||||||
async fn remove_frame<'a>(result: &mut MatchState<'a>, key: &str) -> &'a [MacTree] {
|
match state.remove(self.after_tok.clone()) {
|
||||||
match result.remove(i().i(key).await) {
|
Some(StateEntry::Scalar(_)) => panic!("{} can never be a scalar entry!", self.after_tok),
|
||||||
Some(StateEntry::Scalar(_)) => panic!("{key} is defined in the constructor as a Vec"),
|
Some(StateEntry::Vec(v)) => Some((state, v)),
|
||||||
Some(StateEntry::Vec(v)) => v,
|
None => Some((state, &[][..])),
|
||||||
None => &[],
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let before = remove_frame(&mut result, "::before").await;
|
|
||||||
let after = remove_frame(&mut result, "::after").await;
|
|
||||||
Some((before, result, after))
|
|
||||||
}
|
}
|
||||||
}
|
impl fmt::Display for NamedMatcher {
|
||||||
impl fmt::Display for Matcher {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) }
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) }
|
||||||
}
|
}
|
||||||
impl fmt::Debug for Matcher {
|
impl fmt::Debug for NamedMatcher {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct PriodMatcher(VecMatcher);
|
||||||
|
impl PriodMatcher {
|
||||||
|
pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
|
||||||
|
assert!(
|
||||||
|
pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(),
|
||||||
|
"Prioritized matchers must start and end with a vectorial",
|
||||||
|
);
|
||||||
|
Ok(Self(mk_vec(pattern, i).await?))
|
||||||
|
}
|
||||||
|
/// tokens before the offset always match the prefix
|
||||||
|
pub fn apply<'a>(
|
||||||
|
&self,
|
||||||
|
seq: &'a [MacTree],
|
||||||
|
save_loc: impl Fn(Sym) -> bool,
|
||||||
|
) -> Option<MatchState<'a>> {
|
||||||
|
vec_match(&self.0, seq, &save_loc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl fmt::Display for PriodMatcher {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
|
||||||
|
}
|
||||||
|
impl fmt::Debug for PriodMatcher {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PriodMatcher({self})") }
|
||||||
|
}
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ pub fn scal_match<'a>(
|
|||||||
(ScalMatcher::Placeh { key }, _) =>
|
(ScalMatcher::Placeh { key }, _) =>
|
||||||
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
|
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
|
||||||
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
|
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
|
||||||
any_match(b_mat, &body.items, save_loc),
|
any_match(b_mat, &body[..], save_loc),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,12 +11,12 @@ use orchid_base::tokens::{PARENS, Paren};
|
|||||||
pub enum ScalMatcher {
|
pub enum ScalMatcher {
|
||||||
Name(Sym),
|
Name(Sym),
|
||||||
S(Paren, Box<AnyMatcher>),
|
S(Paren, Box<AnyMatcher>),
|
||||||
Placeh { key: Tok<String> },
|
Placeh { key: IStr },
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum VecMatcher {
|
pub enum VecMatcher {
|
||||||
Placeh {
|
Placeh {
|
||||||
key: Tok<String>,
|
key: IStr,
|
||||||
nonzero: bool,
|
nonzero: bool,
|
||||||
},
|
},
|
||||||
Scan {
|
Scan {
|
||||||
@@ -41,7 +41,7 @@ pub enum VecMatcher {
|
|||||||
/// the length of matches on either side.
|
/// the length of matches on either side.
|
||||||
///
|
///
|
||||||
/// Vectorial keys that appear on either side, in priority order
|
/// Vectorial keys that appear on either side, in priority order
|
||||||
key_order: Vec<Tok<String>>,
|
key_order: Vec<IStr>,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -30,11 +30,11 @@ pub enum StateEntry<'a> {
|
|||||||
}
|
}
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct MatchState<'a> {
|
pub struct MatchState<'a> {
|
||||||
placeholders: HashMap<Tok<String>, StateEntry<'a>>,
|
placeholders: HashMap<IStr, StateEntry<'a>>,
|
||||||
name_posv: HashMap<Sym, Vec<Pos>>,
|
name_posv: HashMap<Sym, Vec<Pos>>,
|
||||||
}
|
}
|
||||||
impl<'a> MatchState<'a> {
|
impl<'a> MatchState<'a> {
|
||||||
pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self {
|
pub fn from_ph(key: IStr, entry: StateEntry<'a>) -> Self {
|
||||||
Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() }
|
Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() }
|
||||||
}
|
}
|
||||||
pub fn combine(self, s: Self) -> Self {
|
pub fn combine(self, s: Self) -> Self {
|
||||||
@@ -45,7 +45,7 @@ impl<'a> MatchState<'a> {
|
|||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> {
|
pub fn ph_len(&self, key: &IStr) -> Option<usize> {
|
||||||
match self.placeholders.get(key)? {
|
match self.placeholders.get(key)? {
|
||||||
StateEntry::Vec(slc) => Some(slc.len()),
|
StateEntry::Vec(slc) => Some(slc.len()),
|
||||||
_ => None,
|
_ => None,
|
||||||
@@ -54,11 +54,8 @@ impl<'a> MatchState<'a> {
|
|||||||
pub fn from_name(name: Sym, location: Pos) -> Self {
|
pub fn from_name(name: Sym, location: Pos) -> Self {
|
||||||
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
|
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
|
||||||
}
|
}
|
||||||
pub fn names(&self) -> impl Iterator<Item = (Sym, &[Pos])> {
|
pub fn get(&self, key: &IStr) -> Option<&StateEntry<'a>> { self.placeholders.get(key) }
|
||||||
self.name_posv.iter().map(|(sym, vec)| (sym.clone(), &vec[..]))
|
pub fn remove(&mut self, name: IStr) -> Option<StateEntry<'a>> {
|
||||||
}
|
|
||||||
pub fn get(&self, key: &Tok<String>) -> Option<&StateEntry<'a>> { self.placeholders.get(key) }
|
|
||||||
pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> {
|
|
||||||
self.placeholders.remove(&name)
|
self.placeholders.remove(&name)
|
||||||
}
|
}
|
||||||
pub fn mk_owned(self) -> OwnedState {
|
pub fn mk_owned(self) -> OwnedState {
|
||||||
@@ -88,10 +85,10 @@ pub enum OwnedEntry {
|
|||||||
Scalar(MacTree),
|
Scalar(MacTree),
|
||||||
}
|
}
|
||||||
pub struct OwnedState {
|
pub struct OwnedState {
|
||||||
placeholders: HashMap<Tok<String>, OwnedEntry>,
|
placeholders: HashMap<IStr, OwnedEntry>,
|
||||||
name_posv: HashMap<Sym, Vec<Pos>>,
|
name_posv: HashMap<Sym, Vec<Pos>>,
|
||||||
}
|
}
|
||||||
impl OwnedState {
|
impl OwnedState {
|
||||||
pub fn get(&self, key: &Tok<String>) -> Option<&OwnedEntry> { self.placeholders.get(key) }
|
pub fn get(&self, key: &IStr) -> Option<&OwnedEntry> { self.placeholders.get(key) }
|
||||||
pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) }
|
pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ use crate::macros::{MacTok, MacTree};
|
|||||||
/// Returns the name, priority and at_least_one of the expression if it is
|
/// Returns the name, priority and at_least_one of the expression if it is
|
||||||
/// a vectorial placeholder
|
/// a vectorial placeholder
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn vec_attrs(expr: &MacTree) -> Option<(Tok<String>, u8, bool)> {
|
pub fn vec_attrs(expr: &MacTree) -> Option<(IStr, u8, bool)> {
|
||||||
match (*expr.tok).clone() {
|
match (*expr.tok).clone() {
|
||||||
MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) =>
|
MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) =>
|
||||||
Some((name, priority, at_least_one)),
|
Some((name, priority, at_least_one)),
|
||||||
|
|||||||
@@ -36,6 +36,7 @@ pub fn vec_match<'a>(
|
|||||||
}
|
}
|
||||||
None
|
None
|
||||||
},
|
},
|
||||||
|
// XXX predict heap space usage and allocation count
|
||||||
VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => {
|
VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => {
|
||||||
if seq.len() < left_sep.len() + right_sep.len() {
|
if seq.len() < left_sep.len() + right_sep.len() {
|
||||||
return None;
|
return None;
|
||||||
|
|||||||
@@ -1,177 +0,0 @@
|
|||||||
use futures::{StreamExt, stream};
|
|
||||||
use orchid_base::error::OrcRes;
|
|
||||||
use orchid_base::sym;
|
|
||||||
use orchid_extension::atom::TAtom;
|
|
||||||
use orchid_extension::atom_owned::own;
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::conv::ToExpr;
|
|
||||||
use orchid_extension::coroutine_exec::exec;
|
|
||||||
use orchid_extension::expr::Expr;
|
|
||||||
use orchid_extension::gen_expr::{GExpr, call, sym_ref};
|
|
||||||
use orchid_extension::tree::{GenMember, fun, prefix};
|
|
||||||
|
|
||||||
use crate::macros::match_macros::MatcherAtom;
|
|
||||||
use crate::macros::resolve::resolve;
|
|
||||||
use crate::macros::utils::{build_macro, mactree, mactreev};
|
|
||||||
use crate::{HomoTpl, MacTree, OrcOpt, Tpl};
|
|
||||||
|
|
||||||
pub async fn gen_std_macro_lib() -> Vec<GenMember> {
|
|
||||||
prefix("std", [
|
|
||||||
prefix("option", [
|
|
||||||
fun(false, "is_some_body", |sub: TAtom<MatcherAtom>, val: OrcOpt<Expr>| {
|
|
||||||
exec(async move |mut h| {
|
|
||||||
let Some(sub_val) = val.0 else { return Ok(OrcOpt(None)) };
|
|
||||||
h.exec::<OrcOpt<Expr>>(call(sub.to_gen().await, [sub_val.to_gen().await])).await
|
|
||||||
})
|
|
||||||
}),
|
|
||||||
fun(false, "is_none_body", async |val: OrcOpt<Expr>| {
|
|
||||||
if val.0.is_none() { OrcOpt(Some(Tpl(()))) } else { OrcOpt(None) }
|
|
||||||
}),
|
|
||||||
build_macro(None, ["of", "empty"])
|
|
||||||
.rule(mactreev!(pattern::match_rule ( std::option::of "...$" sub_pattern 0)), [
|
|
||||||
|[sub]: [_; _]| {
|
|
||||||
exec(async move |mut h| {
|
|
||||||
let sub = h
|
|
||||||
.exec::<TAtom<MatcherAtom>>(
|
|
||||||
resolve(mactree!(pattern::match_rule "push" sub;)).await,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
Ok(MatcherAtom {
|
|
||||||
keys: sub.keys().collect().await,
|
|
||||||
matcher: h
|
|
||||||
.register(call(sym_ref(sym!(std::option::is_some_body; i())), [sub
|
|
||||||
.to_gen()
|
|
||||||
.await]))
|
|
||||||
.await,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
},
|
|
||||||
])
|
|
||||||
.rule(mactreev!(pattern::match_rule(std::option::empty)), [|[]: [_; _]| {
|
|
||||||
exec(async |mut h| {
|
|
||||||
Ok(MatcherAtom {
|
|
||||||
keys: vec![],
|
|
||||||
matcher: h.register(sym_ref(sym!(std::option::is_none_body; i()))).await,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}])
|
|
||||||
.finish(),
|
|
||||||
]),
|
|
||||||
prefix("tuple", [
|
|
||||||
build_macro(None, ["t"])
|
|
||||||
.rule(mactreev!(std::tuple::t [ "...$" elements 0 ]), [|[elements]: [_; _]| {
|
|
||||||
exec(async move |mut h| {
|
|
||||||
let tup = h
|
|
||||||
.exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve; i())), [
|
|
||||||
mactree!((macros::common::comma_list "push" elements ;)).to_gen().await,
|
|
||||||
]))
|
|
||||||
.await?;
|
|
||||||
let val = stream::iter(&tup.0[..])
|
|
||||||
.fold(sym_ref(sym!(std::tuple::empty; i())), async |head, new| {
|
|
||||||
call(sym_ref(sym!(std::tuple::cat; i())), [
|
|
||||||
head,
|
|
||||||
call(sym_ref(sym!(std::tuple::one; i())), [call(
|
|
||||||
sym_ref(sym!(macros::resolve; i())),
|
|
||||||
[new.clone().to_gen().await],
|
|
||||||
)]),
|
|
||||||
])
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
Ok(val)
|
|
||||||
})
|
|
||||||
}])
|
|
||||||
.rule(
|
|
||||||
mactreev!(pattern::match_rule(std::tuple::t[ "...$" elements 0 macros::common::..])),
|
|
||||||
[async |[elements]: [_; _]| parse_tpl(elements, Some(mactree!(macros::common::_))).await],
|
|
||||||
)
|
|
||||||
.rule(
|
|
||||||
mactreev!(pattern::match_rule(
|
|
||||||
std::tuple::t[ "...$" elements 1 macros::common::.. "...$" tail 0]
|
|
||||||
)),
|
|
||||||
[async |[elements, tail]: [_; _]| parse_tpl(elements, Some(tail)).await],
|
|
||||||
)
|
|
||||||
.rule(mactreev!(pattern::match_rule(std::tuple::t[ "...$" elements 0])), [
|
|
||||||
|[elements]: [_; _]| parse_tpl(elements, None),
|
|
||||||
])
|
|
||||||
.finish(),
|
|
||||||
fun(false, "matcher_body", tuple_matcher_body),
|
|
||||||
]),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_tpl(elements: MacTree, tail_matcher: Option<MacTree>) -> impl Future<Output = GExpr> {
|
|
||||||
exec(async move |mut h| -> OrcRes<MatcherAtom> {
|
|
||||||
let tup = h
|
|
||||||
.exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve; i())), [
|
|
||||||
mactree!((macros::common::comma_list "push" elements ;)).to_gen().await,
|
|
||||||
]))
|
|
||||||
.await?;
|
|
||||||
let mut subs = Vec::with_capacity(tup.0.len());
|
|
||||||
for mac_a in &tup.0[..] {
|
|
||||||
let mac = own(mac_a).await;
|
|
||||||
let sub = h
|
|
||||||
.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
|
|
||||||
mactree!(pattern::match_rule "push" mac ;).to_gen().await,
|
|
||||||
]))
|
|
||||||
.await?;
|
|
||||||
subs.push(sub);
|
|
||||||
}
|
|
||||||
let tail_matcher = match tail_matcher {
|
|
||||||
Some(mac) => Some(
|
|
||||||
h.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
|
|
||||||
mactree!(pattern::match_rule "push" mac ;).to_gen().await,
|
|
||||||
]))
|
|
||||||
.await?,
|
|
||||||
),
|
|
||||||
None => None,
|
|
||||||
};
|
|
||||||
Ok(MatcherAtom {
|
|
||||||
keys: stream::iter(&subs[..])
|
|
||||||
.flat_map(|t| t.keys())
|
|
||||||
.chain(stream::iter(&tail_matcher).flat_map(|mat| mat.keys()))
|
|
||||||
.collect()
|
|
||||||
.await,
|
|
||||||
matcher: call(sym_ref(sym!(std::tuple::matcher_body; i())), [
|
|
||||||
HomoTpl(subs).to_gen().await,
|
|
||||||
OrcOpt(tail_matcher).to_gen().await,
|
|
||||||
])
|
|
||||||
.to_expr()
|
|
||||||
.await,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn tuple_matcher_body(
|
|
||||||
children: HomoTpl<TAtom<MatcherAtom>>,
|
|
||||||
tail: OrcOpt<TAtom<MatcherAtom>>,
|
|
||||||
value: HomoTpl<Expr>,
|
|
||||||
) -> impl Future<Output = GExpr> {
|
|
||||||
exec(async move |mut h| -> OrcRes<OrcOpt<GExpr>> {
|
|
||||||
if value.0.len() < children.0.len() {
|
|
||||||
return Ok(OrcOpt(None));
|
|
||||||
}
|
|
||||||
let mut binds = Vec::new();
|
|
||||||
for (sub_mat, sub_val) in children.0.iter().zip(&value.0) {
|
|
||||||
match sub_mat.run_matcher(&mut h, sub_val.clone()).await? {
|
|
||||||
OrcOpt(None) => return Ok(OrcOpt(None)),
|
|
||||||
OrcOpt(Some(subres)) => binds.extend(subres.0),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
match tail.0 {
|
|
||||||
None if children.0.len() < value.0.len() => return Ok(OrcOpt(None)),
|
|
||||||
None => (),
|
|
||||||
Some(tail_mat) => {
|
|
||||||
let tail_tpl = stream::iter(&value.0[children.0.len()..])
|
|
||||||
.fold(sym_ref(sym!(std::tuple::empty; i())), async |prefix, new| {
|
|
||||||
call(sym_ref(sym!(std::tuple::cat; i())), [prefix, new.clone().to_gen().await])
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
match tail_mat.run_matcher(&mut h, tail_tpl).await? {
|
|
||||||
OrcOpt(Some(tail_binds)) => binds.extend(tail_binds.0),
|
|
||||||
OrcOpt(None) => return Ok(OrcOpt(None)),
|
|
||||||
}
|
|
||||||
},
|
|
||||||
};
|
|
||||||
todo!()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,275 +0,0 @@
|
|||||||
use std::borrow::Cow;
|
|
||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use async_fn_stream::stream;
|
|
||||||
use futures::StreamExt;
|
|
||||||
use futures::future::LocalBoxFuture;
|
|
||||||
use itertools::{Itertools, chain};
|
|
||||||
use never::Never;
|
|
||||||
use orchid_base::name::{NameLike, Sym, VPath};
|
|
||||||
use orchid_extension::atom::{Atomic, TAtom};
|
|
||||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::conv::ToExpr;
|
|
||||||
use orchid_extension::gen_expr::{GExpr, sym_ref};
|
|
||||||
use orchid_extension::tree::{GenMember, MemKind, cnst, lazy};
|
|
||||||
|
|
||||||
use crate::macros::macro_value::{Macro, MacroData, Rule};
|
|
||||||
use crate::macros::mactree::MacTreeSeq;
|
|
||||||
use crate::macros::rule::matcher::Matcher;
|
|
||||||
use crate::{MacTok, MacTree};
|
|
||||||
|
|
||||||
pub type Args = Vec<MacTree>;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct MacroBodyArgCollector {
|
|
||||||
argc: usize,
|
|
||||||
args: Args,
|
|
||||||
cb: Rc<dyn Fn(Args) -> LocalBoxFuture<'static, GExpr>>,
|
|
||||||
}
|
|
||||||
impl Atomic for MacroBodyArgCollector {
|
|
||||||
type Data = ();
|
|
||||||
type Variant = OwnedVariant;
|
|
||||||
}
|
|
||||||
impl OwnedAtom for MacroBodyArgCollector {
|
|
||||||
type Refs = Never;
|
|
||||||
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
|
|
||||||
async fn call_ref(&self, arg: orchid_extension::expr::Expr) -> GExpr {
|
|
||||||
eprintln!("This is an intermediary value. It should never be copied");
|
|
||||||
self.clone().call(arg).await
|
|
||||||
}
|
|
||||||
async fn call(mut self, arg: orchid_extension::expr::Expr) -> GExpr {
|
|
||||||
let atom = (TAtom::downcast(arg.handle()).await).unwrap_or_else(|_| {
|
|
||||||
panic!("This is an intermediary value, the argument types are known in advance")
|
|
||||||
});
|
|
||||||
self.args.push(own(&atom).await);
|
|
||||||
if self.argc == self.args.len() {
|
|
||||||
(self.cb)(self.args).await.to_gen().await
|
|
||||||
} else {
|
|
||||||
self.to_gen().await
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn body_name(name: &str, counter: usize) -> String { format!("({name})::{counter}") }
|
|
||||||
|
|
||||||
pub(crate) fn build_macro(
|
|
||||||
prio: Option<u64>,
|
|
||||||
own_kws: impl IntoIterator<Item = &'static str>,
|
|
||||||
) -> MacroBuilder {
|
|
||||||
MacroBuilder {
|
|
||||||
prio,
|
|
||||||
own_kws: own_kws.into_iter().collect(),
|
|
||||||
patterns: Vec::new(),
|
|
||||||
body_consts: Vec::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub(crate) struct MacroBuilder {
|
|
||||||
prio: Option<u64>,
|
|
||||||
own_kws: Vec<&'static str>,
|
|
||||||
patterns: Vec<MacTreeSeq>,
|
|
||||||
body_consts: Vec<GenMember>,
|
|
||||||
}
|
|
||||||
impl MacroBuilder {
|
|
||||||
pub(crate) fn rule<const N: usize, R: ToExpr>(
|
|
||||||
mut self,
|
|
||||||
pat: MacTreeSeq,
|
|
||||||
body: [impl AsyncFn([MacTree; N]) -> R + 'static; 1],
|
|
||||||
) -> Self {
|
|
||||||
let [body] = body;
|
|
||||||
let body = Rc::new(body);
|
|
||||||
let name = &body_name(self.own_kws[0], self.body_consts.len());
|
|
||||||
self.body_consts.extend(match N {
|
|
||||||
0 => lazy(true, name, async move |_| {
|
|
||||||
let argv = [].into_iter().collect_array().expect("N is 0");
|
|
||||||
MemKind::Const(body(argv).await.to_gen().await)
|
|
||||||
}),
|
|
||||||
1.. => cnst(true, name, MacroBodyArgCollector {
|
|
||||||
argc: N,
|
|
||||||
args: Vec::new(),
|
|
||||||
cb: Rc::new(move |argv| {
|
|
||||||
let arr = argv.into_iter().collect_array::<N>().expect("argc should enforce the length");
|
|
||||||
let body = body.clone();
|
|
||||||
Box::pin(async move { body(arr).await.to_gen().await })
|
|
||||||
}),
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
self.patterns.push(pat);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
pub(crate) fn finish(self) -> Vec<GenMember> {
|
|
||||||
let Self { own_kws, prio, patterns, body_consts } = self;
|
|
||||||
let name = own_kws[0];
|
|
||||||
let main_const = lazy(true, name, async move |path| {
|
|
||||||
let module = (Sym::new(path.split_last_seg().1.iter().cloned(), &i()).await)
|
|
||||||
.expect("Default macro in global root");
|
|
||||||
MemKind::Const(
|
|
||||||
Macro(Rc::new(MacroData {
|
|
||||||
module,
|
|
||||||
prio,
|
|
||||||
rules: stream(async |mut h| {
|
|
||||||
for (counter, pattern) in patterns.into_iter().enumerate() {
|
|
||||||
let mut placeholders = Vec::new();
|
|
||||||
pattern.map(&mut false, &mut |tt| {
|
|
||||||
if let MacTok::Ph(ph) = &*tt.tok {
|
|
||||||
placeholders.push(ph.name.clone())
|
|
||||||
}
|
|
||||||
None
|
|
||||||
});
|
|
||||||
h.emit(Rule {
|
|
||||||
matcher: Matcher::new(pattern.clone()).await.unwrap(),
|
|
||||||
pattern,
|
|
||||||
placeholders,
|
|
||||||
body_name: i().i(&format!("({name})::{counter}")).await,
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
.await,
|
|
||||||
}))
|
|
||||||
.to_gen()
|
|
||||||
.await,
|
|
||||||
)
|
|
||||||
});
|
|
||||||
let kw_consts = own_kws[1..].iter().flat_map(|kw| {
|
|
||||||
lazy(true, kw, async |path| {
|
|
||||||
let main_const_name = VPath::new(path.split_last_seg().1.iter().cloned())
|
|
||||||
.name_with_suffix(i().i(name).await)
|
|
||||||
.to_sym(&i())
|
|
||||||
.await;
|
|
||||||
MemKind::Const(sym_ref(main_const_name))
|
|
||||||
})
|
|
||||||
});
|
|
||||||
chain!(main_const, kw_consts, body_consts).collect()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! mactree {
|
|
||||||
($($body:tt)*) => {
|
|
||||||
$crate::macros::utils::mactreev!(($($body)*)).items[0].clone()
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! mactreev_impl {
|
|
||||||
(@RECUR $ret:ident) => {};
|
|
||||||
(@RECUR $ret:ident "..$" $name:ident $prio:literal $($tail:tt)*) => {
|
|
||||||
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
|
|
||||||
name: orchid_extension::context::i().i(stringify!($name)).await,
|
|
||||||
kind: $crate::macros::mactree::PhKind::Vector{ at_least_one: false, priority: $prio }
|
|
||||||
}).at(orchid_base::location::Pos::Inherit));
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident "...$" $name:ident $prio:literal $($tail:tt)*) => {
|
|
||||||
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
|
|
||||||
name: orchid_extension::context::i().i(stringify!($name)).await,
|
|
||||||
kind: $crate::macros::mactree::PhKind::Vector{ at_least_one: true, priority: $prio }
|
|
||||||
}).at(orchid_base::location::Pos::Inherit));
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident "$" $name:ident $($tail:tt)*) => {
|
|
||||||
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
|
|
||||||
name: orchid_extension::context::i().i(stringify!(name)).await,
|
|
||||||
kind: $crate::macros::mactree::PhKind::Scalar
|
|
||||||
}).at(orchid_base::location::Pos::Inherit));
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident "Val" $arg:expr ; $($tail:tt)*) => {
|
|
||||||
$ret.push(
|
|
||||||
$crate::macros::mactree::MacTok::Value($arg)
|
|
||||||
.at(orchid_base::location::Pos::Inherit)
|
|
||||||
);
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident "push" $arg:expr ; $($tail:tt)*) => {
|
|
||||||
$ret.push($arg);
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident "l_" $arg:expr ; ($($body:tt)*) $($tail:tt)*) => {
|
|
||||||
$ret.push(MacTok::Lambda(
|
|
||||||
MacTok::Name($arg).at(orchid_base::location::Pos::Inherit),
|
|
||||||
$crate::macros::utils::mactreev!($($body)*)
|
|
||||||
).at(orchid_base::location::Pos::Inherit));
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident "l" $argh:tt $(:: $arg:tt)+ ($($body:tt)*) $($tail:tt)*) => {
|
|
||||||
$ret.push(MacTok::Lambda(
|
|
||||||
MacTok::Name(sym!($argh $(:: $arg)+; orchid_extension::context::i()).await).at(orchid_base::location::Pos::Inherit),
|
|
||||||
$crate::macros::utils::mactreev!($($body)*)
|
|
||||||
).at(orchid_base::location::Pos::Inherit));
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident $name:literal $($tail:tt)*) => {
|
|
||||||
assert!(
|
|
||||||
$name.contains("::"),
|
|
||||||
"{} was treated as a name, but it doesn't have a namespace prefix",
|
|
||||||
$name
|
|
||||||
);
|
|
||||||
let sym = orchid_base::name::Sym::parse(
|
|
||||||
$name,
|
|
||||||
&orchid_extension::context::i()
|
|
||||||
).await.expect("Empty string in sym literal in Rust");
|
|
||||||
$ret.push(
|
|
||||||
$crate::macros::mactree::MacTok::Name(sym)
|
|
||||||
.at(orchid_base::location::Pos::Inherit)
|
|
||||||
);
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident ( $($body:tt)* ) $($tail:tt)*) => {
|
|
||||||
$ret.push(
|
|
||||||
$crate::macros::mactree::MacTok::S(
|
|
||||||
orchid_base::tree::Paren::Round,
|
|
||||||
$crate::macros::utils::mactreev!($($body)*)
|
|
||||||
)
|
|
||||||
.at(orchid_base::location::Pos::Inherit)
|
|
||||||
);
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident [ $($body:tt)* ] $($tail:tt)*) => {
|
|
||||||
$ret.push(
|
|
||||||
$crate::macros::mactree::MacTok::S(
|
|
||||||
orchid_base::tree::Paren::Square,
|
|
||||||
$crate::macros::utils::mactreev!($($body)*)
|
|
||||||
)
|
|
||||||
.at(orchid_base::location::Pos::Inherit)
|
|
||||||
);
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident { $($body:tt)* } $($tail:tt)*) => {
|
|
||||||
$ret.push(
|
|
||||||
$crate::macros::mactree::MacTok::S(
|
|
||||||
orchid_base::tree::Paren::Curly,
|
|
||||||
$crate::macros::utils::mactreev!($($body)*)
|
|
||||||
)
|
|
||||||
.at(orchid_base::location::Pos::Inherit)
|
|
||||||
);
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
(@RECUR $ret:ident $ns:ident :: $nhead:tt $($tail:tt)*) => {
|
|
||||||
$crate::macros::utils::mactreev_impl!(@NAME_MUNCHER $ret ($ns :: $nhead) $($tail)*)
|
|
||||||
};
|
|
||||||
(@NAME_MUNCHER $ret:ident ($($munched:tt)*) :: $name:tt $($tail:tt)*) => {
|
|
||||||
$crate::macros::utils::mactreev_impl!(@NAME_MUNCHER $ret ($($munched)* :: $name) $($tail)*)
|
|
||||||
};
|
|
||||||
(@NAME_MUNCHER $ret:ident ($($munched:tt)*) $($tail:tt)*) => {
|
|
||||||
let sym = orchid_base::sym!($($munched)* ; orchid_extension::context::i());
|
|
||||||
$ret.push(
|
|
||||||
$crate::macros::mactree::MacTok::Name(sym)
|
|
||||||
.at(orchid_base::location::Pos::Inherit)
|
|
||||||
);
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
|
||||||
};
|
|
||||||
() => { Vec::new() };
|
|
||||||
}
|
|
||||||
macro_rules! mactreev {
|
|
||||||
($($tail:tt)*) => {
|
|
||||||
{
|
|
||||||
let mut ret = Vec::<$crate::macros::mactree::MacTree>::new();
|
|
||||||
ret.extend([]); // silence unneeded mut warning
|
|
||||||
$crate::macros::utils::mactreev_impl!(@RECUR ret $($tail)*);
|
|
||||||
$crate::macros::mactree::MacTreeSeq::new(ret)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) use {mactree, mactreev, mactreev_impl};
|
|
||||||
@@ -1,8 +1,4 @@
|
|||||||
pub mod number;
|
pub mod number;
|
||||||
pub mod option;
|
|
||||||
pub mod protocol;
|
|
||||||
pub mod record;
|
|
||||||
pub mod reflection;
|
|
||||||
pub mod std_system;
|
|
||||||
pub mod string;
|
pub mod string;
|
||||||
pub mod tuple;
|
|
||||||
|
pub mod std_system;
|
||||||
|
|||||||
@@ -1,20 +1,15 @@
|
|||||||
use orchid_api_derive::Coding;
|
use orchid_api_derive::Coding;
|
||||||
use orchid_api_traits::Request;
|
|
||||||
use orchid_base::error::OrcRes;
|
use orchid_base::error::OrcRes;
|
||||||
use orchid_base::format::FmtUnit;
|
use orchid_base::format::FmtUnit;
|
||||||
use orchid_base::name::Sym;
|
|
||||||
use orchid_base::number::Numeric;
|
use orchid_base::number::Numeric;
|
||||||
use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, Supports, TAtom, ToAtom};
|
use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, ToAtom, TypAtom};
|
||||||
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
|
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::conv::TryFromExpr;
|
use orchid_extension::conv::TryFromExpr;
|
||||||
use orchid_extension::expr::Expr;
|
use orchid_extension::expr::Expr;
|
||||||
|
use orchid_extension::system::SysCtx;
|
||||||
use ordered_float::NotNan;
|
use ordered_float::NotNan;
|
||||||
use rust_decimal::prelude::Zero;
|
use rust_decimal::prelude::Zero;
|
||||||
|
|
||||||
use crate::std::protocol::types::GetTagIdMethod;
|
|
||||||
use crate::std::string::to_string::ToStringMethod;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Coding)]
|
#[derive(Clone, Debug, Coding)]
|
||||||
pub struct Int(pub i64);
|
pub struct Int(pub i64);
|
||||||
impl Atomic for Int {
|
impl Atomic for Int {
|
||||||
@@ -22,21 +17,11 @@ impl Atomic for Int {
|
|||||||
type Data = Self;
|
type Data = Self;
|
||||||
}
|
}
|
||||||
impl ThinAtom for Int {
|
impl ThinAtom for Int {
|
||||||
async fn print(&self) -> FmtUnit { self.0.to_string().into() }
|
async fn print(&self, _: SysCtx) -> FmtUnit { self.0.to_string().into() }
|
||||||
}
|
}
|
||||||
impl TryFromExpr for Int {
|
impl TryFromExpr for Int {
|
||||||
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
||||||
TAtom::<Int>::try_from_expr(expr).await.map(|t| t.value)
|
TypAtom::<Int>::try_from_expr(expr).await.map(|t| t.value)
|
||||||
}
|
|
||||||
}
|
|
||||||
impl Supports<GetTagIdMethod> for Int {
|
|
||||||
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
|
|
||||||
Sym::parse("std::number::Int", &i()).await.unwrap().to_api()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl Supports<ToStringMethod> for Int {
|
|
||||||
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
|
|
||||||
self.0.to_string()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -47,18 +32,13 @@ impl Atomic for Float {
|
|||||||
type Data = Self;
|
type Data = Self;
|
||||||
}
|
}
|
||||||
impl ThinAtom for Float {
|
impl ThinAtom for Float {
|
||||||
async fn print(&self) -> FmtUnit { self.0.to_string().into() }
|
async fn print(&self, _: SysCtx) -> FmtUnit { self.0.to_string().into() }
|
||||||
}
|
}
|
||||||
impl TryFromExpr for Float {
|
impl TryFromExpr for Float {
|
||||||
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
||||||
Ok(Self(Num::try_from_expr(expr).await?.0.to_f64()))
|
Ok(Self(Num::try_from_expr(expr).await?.0.to_f64()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Supports<ToStringMethod> for Float {
|
|
||||||
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
|
|
||||||
self.0.to_string()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Num(pub Numeric);
|
pub struct Num(pub Numeric);
|
||||||
impl TryFromExpr for Num {
|
impl TryFromExpr for Num {
|
||||||
@@ -67,7 +47,7 @@ impl TryFromExpr for Num {
|
|||||||
Ok(t) => return Ok(Num(Numeric::Int(t.0))),
|
Ok(t) => return Ok(Num(Numeric::Int(t.0))),
|
||||||
Err(e) => e,
|
Err(e) => e,
|
||||||
};
|
};
|
||||||
match TAtom::<Float>::try_from_expr(expr).await {
|
match TypAtom::<Float>::try_from_expr(expr).await {
|
||||||
Ok(t) => Ok(Num(Numeric::Float(t.0))),
|
Ok(t) => Ok(Num(Numeric::Float(t.0))),
|
||||||
Err(e2) => Err(e + e2),
|
Err(e2) => Err(e + e2),
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,7 +3,6 @@ use std::ops::RangeInclusive;
|
|||||||
use orchid_base::error::OrcRes;
|
use orchid_base::error::OrcRes;
|
||||||
use orchid_base::number::{num_to_errv, parse_num};
|
use orchid_base::number::{num_to_errv, parse_num};
|
||||||
use orchid_extension::atom::ToAtom;
|
use orchid_extension::atom::ToAtom;
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::lexer::{LexContext, Lexer};
|
use orchid_extension::lexer::{LexContext, Lexer};
|
||||||
use orchid_extension::tree::{GenTokTree, x_tok};
|
use orchid_extension::tree::{GenTokTree, x_tok};
|
||||||
|
|
||||||
@@ -13,13 +12,13 @@ use super::num_atom::Num;
|
|||||||
pub struct NumLexer;
|
pub struct NumLexer;
|
||||||
impl Lexer for NumLexer {
|
impl Lexer for NumLexer {
|
||||||
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
|
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
|
||||||
async fn lex<'a>(all: &'a str, lxcx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
||||||
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
|
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
|
||||||
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
|
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
|
||||||
let fac = match parse_num(chars) {
|
let fac = match parse_num(chars) {
|
||||||
Ok(numeric) => Num(numeric).to_atom_factory(),
|
Ok(numeric) => Num(numeric).to_atom_factory(),
|
||||||
Err(e) => return Err(num_to_errv(e, lxcx.pos(all), lxcx.src(), &i()).await),
|
Err(e) => return Err(num_to_errv(e, ctx.pos(all), ctx.src(), ctx.ctx.i()).await),
|
||||||
};
|
};
|
||||||
Ok((tail, x_tok(fac).await.at(lxcx.pos_lt(chars.len(), tail))))
|
Ok((tail, x_tok(fac).await.at(ctx.pos_lt(chars.len(), tail))))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,28 +6,28 @@ use super::num_atom::{Float, HomoArray, Int, Num};
|
|||||||
|
|
||||||
pub fn gen_num_lib() -> Vec<GenMember> {
|
pub fn gen_num_lib() -> Vec<GenMember> {
|
||||||
prefix("std::number", [
|
prefix("std::number", [
|
||||||
fun(true, "add", async |a: Num, b: Num| {
|
fun(true, "add", |a: Num, b: Num| async move {
|
||||||
Num(match HomoArray::new([a.0, b.0]) {
|
Num(match HomoArray::new([a.0, b.0]) {
|
||||||
HomoArray::Int([a, b]) => Numeric::Int(a + b),
|
HomoArray::Int([a, b]) => Numeric::Int(a + b),
|
||||||
HomoArray::Float([a, b]) => Numeric::Float(a + b),
|
HomoArray::Float([a, b]) => Numeric::Float(a + b),
|
||||||
})
|
})
|
||||||
}),
|
}),
|
||||||
fun(true, "neg", async |a: Num| {
|
fun(true, "neg", |a: Num| async move {
|
||||||
Num(match a.0 {
|
Num(match a.0 {
|
||||||
Numeric::Int(i) => Numeric::Int(-i),
|
Numeric::Int(i) => Numeric::Int(-i),
|
||||||
Numeric::Float(f) => Numeric::Float(-f),
|
Numeric::Float(f) => Numeric::Float(-f),
|
||||||
})
|
})
|
||||||
}),
|
}),
|
||||||
fun(true, "mul", async |a: Num, b: Num| {
|
fun(true, "mul", |a: Num, b: Num| async move {
|
||||||
Num(match HomoArray::new([a.0, b.0]) {
|
Num(match HomoArray::new([a.0, b.0]) {
|
||||||
HomoArray::Int([a, b]) => Numeric::Int(a * b),
|
HomoArray::Int([a, b]) => Numeric::Int(a * b),
|
||||||
HomoArray::Float([a, b]) => Numeric::Float(a * b),
|
HomoArray::Float([a, b]) => Numeric::Float(a * b),
|
||||||
})
|
})
|
||||||
}),
|
}),
|
||||||
fun(true, "idiv", async |a: Int, b: Int| Int(a.0 / b.0)),
|
fun(true, "idiv", |a: Int, b: Int| async move { Int(a.0 / b.0) }),
|
||||||
fun(true, "imod", async |a: Int, b: Int| Int(a.0 % b.0)),
|
fun(true, "imod", |a: Int, b: Int| async move { Int(a.0 % b.0) }),
|
||||||
fun(true, "fdiv", async |a: Float, b: Float| Float(a.0 / b.0)),
|
fun(true, "fdiv", |a: Float, b: Float| async move { Float(a.0 / b.0) }),
|
||||||
fun(true, "fmod", async |a: Float, b: Float| {
|
fun(true, "fmod", |a: Float, b: Float| async move {
|
||||||
Float(a.0 - NotNan::new((a.0 / b.0).trunc()).unwrap() * b.0)
|
Float(a.0 - NotNan::new((a.0 / b.0).trunc()).unwrap() * b.0)
|
||||||
}),
|
}),
|
||||||
])
|
])
|
||||||
|
|||||||
@@ -1,75 +0,0 @@
|
|||||||
use std::borrow::Cow;
|
|
||||||
use std::pin::Pin;
|
|
||||||
|
|
||||||
use futures::AsyncWrite;
|
|
||||||
use orchid_api_traits::Encode;
|
|
||||||
use orchid_base::error::mk_errv;
|
|
||||||
use orchid_base::sym;
|
|
||||||
use orchid_extension::atom::{Atomic, ForeignAtom, TAtom};
|
|
||||||
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::conv::{ToExpr, TryFromExpr};
|
|
||||||
use orchid_extension::expr::{Expr, ExprHandle};
|
|
||||||
use orchid_extension::gen_expr::{call, sym_ref};
|
|
||||||
use orchid_extension::tree::{GenMember, cnst, fun, prefix};
|
|
||||||
|
|
||||||
use crate::{OrcString, api};
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct OptAtom(Option<Expr>);
|
|
||||||
impl Atomic for OptAtom {
|
|
||||||
type Data = Option<api::ExprTicket>;
|
|
||||||
type Variant = OwnedVariant;
|
|
||||||
}
|
|
||||||
impl OwnedAtom for OptAtom {
|
|
||||||
type Refs = Vec<Expr>;
|
|
||||||
async fn val(&self) -> Cow<'_, Self::Data> {
|
|
||||||
Cow::Owned(self.0.as_ref().map(|ex| ex.handle().ticket()))
|
|
||||||
}
|
|
||||||
async fn deserialize(mut ctx: impl DeserializeCtx, refs: Self::Refs) -> Self {
|
|
||||||
Self(ctx.read::<bool>().await.then(|| refs.into_iter().next().unwrap()))
|
|
||||||
}
|
|
||||||
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
|
|
||||||
self.0.is_some().encode(write).await;
|
|
||||||
self.0.iter().cloned().collect()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct OrcOpt<T>(pub Option<T>);
|
|
||||||
impl<T: TryFromExpr> TryFromExpr for OrcOpt<T> {
|
|
||||||
async fn try_from_expr(expr: Expr) -> orchid_base::error::OrcRes<Self> {
|
|
||||||
let atom = TAtom::<OptAtom>::try_from_expr(expr).await?;
|
|
||||||
match atom.value {
|
|
||||||
None => Ok(OrcOpt(None)),
|
|
||||||
Some(tk) => Ok(OrcOpt(Some(
|
|
||||||
T::try_from_expr(Expr::from_handle(ExprHandle::from_ticket(tk).await)).await?,
|
|
||||||
))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl<T: ToExpr + 'static> ToExpr for OrcOpt<T> {
|
|
||||||
async fn to_gen(self) -> orchid_extension::gen_expr::GExpr {
|
|
||||||
if let Some(val) = self.0 {
|
|
||||||
call(sym_ref(sym!(std::option::some; i())), [val.to_gen().await])
|
|
||||||
} else {
|
|
||||||
sym_ref(sym!(std::option::none; i()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn gen_option_lib() -> Vec<GenMember> {
|
|
||||||
prefix("std::option", [
|
|
||||||
cnst(true, "none", OptAtom(None)),
|
|
||||||
fun(true, "some", async |ex: Expr| OptAtom(Some(ex))),
|
|
||||||
fun(true, "expect", async |opt: ForeignAtom, msg: OrcString| {
|
|
||||||
match OrcOpt::try_from_expr(opt.clone().ex()).await? {
|
|
||||||
OrcOpt(Some(ex)) => Ok::<Expr, _>(ex),
|
|
||||||
OrcOpt(None) => Err(mk_errv(
|
|
||||||
i().i("Unwrapped std::option::none").await,
|
|
||||||
msg.get_string().await.as_str(),
|
|
||||||
[opt.pos()],
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
}),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
pub mod parse_impls;
|
|
||||||
pub mod proto_parser;
|
|
||||||
pub mod type_parser;
|
|
||||||
pub mod types;
|
|
||||||
@@ -1,78 +0,0 @@
|
|||||||
use itertools::{Itertools, chain};
|
|
||||||
use orchid_base::error::{OrcRes, mk_errv};
|
|
||||||
use orchid_base::interner::Tok;
|
|
||||||
use orchid_base::name::Sym;
|
|
||||||
use orchid_base::parse::{
|
|
||||||
Import, ParseCtx, Parsed, Snippet, expect_tok, line_items, parse_multiname, token_errv,
|
|
||||||
};
|
|
||||||
use orchid_base::tree::{Paren, Token};
|
|
||||||
use orchid_extension::parser::{
|
|
||||||
PTokTree, ParsCtx, ParsedLine, ParsedLineKind, p_tree2gen, p_v2gen,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub async fn parse_impls(
|
|
||||||
ctx: &ParsCtx<'_>,
|
|
||||||
lines: &mut Vec<ParsedLine>,
|
|
||||||
impls: &mut Vec<(Sym, Tok<String>)>,
|
|
||||||
body_tt: &PTokTree,
|
|
||||||
) -> OrcRes<()> {
|
|
||||||
let i = ctx.i().clone();
|
|
||||||
let body = match &body_tt.tok {
|
|
||||||
Token::S(Paren::Round, body) => line_items(ctx, Snippet::new(body_tt, body)).await,
|
|
||||||
Token::S(ptyp, _) =>
|
|
||||||
return Err(mk_errv(
|
|
||||||
i.i("Incorrect paren type").await,
|
|
||||||
format!("Expected () block, found {ptyp}"),
|
|
||||||
[body_tt.sr().pos()],
|
|
||||||
)),
|
|
||||||
_ =>
|
|
||||||
return Err(
|
|
||||||
token_errv(ctx, body_tt, "Expected body", |s| {
|
|
||||||
format!("Expected (impl ...) block, found {s}")
|
|
||||||
})
|
|
||||||
.await,
|
|
||||||
),
|
|
||||||
};
|
|
||||||
for Parsed { tail: line, output: comments } in body {
|
|
||||||
if let Ok(Parsed { tail, .. }) = expect_tok(ctx, line, i.i("impl").await).await {
|
|
||||||
let Parsed { tail, output: name_tt } = parse_multiname(ctx, tail).await?;
|
|
||||||
let (name, name_sr) = match name_tt.into_iter().at_most_one() {
|
|
||||||
Ok(None) => panic!("multiname is always at least one name"),
|
|
||||||
Ok(Some(ref n @ Import { name: Some(_), ref sr, .. })) =>
|
|
||||||
(n.clone().mspath().to_sym(&i).await, sr.clone()),
|
|
||||||
Ok(Some(Import { name: None, sr, .. })) =>
|
|
||||||
return Err(mk_errv(
|
|
||||||
i.i("impl line with globstar").await,
|
|
||||||
"::* is not permitted in a protocol impl",
|
|
||||||
[sr.pos()],
|
|
||||||
)),
|
|
||||||
Err(e) =>
|
|
||||||
return Err(mk_errv(
|
|
||||||
i.i("Impl line with multiple protocol names").await,
|
|
||||||
"::() is not permitted in a protocol impl",
|
|
||||||
e.map(|i| i.sr.pos()),
|
|
||||||
)),
|
|
||||||
};
|
|
||||||
let Parsed { tail, .. } = expect_tok(ctx, tail, i.i("as").await).await?;
|
|
||||||
let cnst_name = i.i(&format!("{}{}", lines.len(), name.iter().join("__"))).await;
|
|
||||||
lines.push(ParsedLine {
|
|
||||||
comments,
|
|
||||||
sr: line.sr(),
|
|
||||||
kind: ParsedLineKind::Rec(Vec::from_iter(chain![
|
|
||||||
[Token::Name(i.i("let").await).at(line.sr())],
|
|
||||||
[Token::Name(cnst_name.clone()).at(name_sr)],
|
|
||||||
[Token::Name(i.i("=").await).at(line.sr())],
|
|
||||||
tail.iter().cloned().map(p_tree2gen),
|
|
||||||
])),
|
|
||||||
});
|
|
||||||
impls.push((name, cnst_name));
|
|
||||||
} else {
|
|
||||||
lines.push(ParsedLine {
|
|
||||||
sr: line.sr(),
|
|
||||||
comments,
|
|
||||||
kind: ParsedLineKind::Rec(p_v2gen(line.to_vec())),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,77 +0,0 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use hashbrown::HashMap;
|
|
||||||
use orchid_base::error::{OrcRes, mk_errv};
|
|
||||||
use orchid_base::parse::{Comment, Parsed, expect_end, try_pop_no_fluff};
|
|
||||||
use orchid_base::sym;
|
|
||||||
use orchid_base::tree::Token;
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::coroutine_exec::exec;
|
|
||||||
use orchid_extension::gen_expr::{call, sym_ref};
|
|
||||||
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
|
|
||||||
|
|
||||||
use crate::std::protocol::parse_impls::parse_impls;
|
|
||||||
use crate::std::protocol::types::Tag;
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct AsProtoParser;
|
|
||||||
impl Parser for AsProtoParser {
|
|
||||||
const LINE_HEAD: &'static str = "as_proto";
|
|
||||||
async fn parse<'a>(
|
|
||||||
pcx: ParsCtx<'a>,
|
|
||||||
exported: bool,
|
|
||||||
cmts: Vec<Comment>,
|
|
||||||
line: PSnippet<'a>,
|
|
||||||
) -> OrcRes<Vec<ParsedLine>> {
|
|
||||||
let Parsed { output: body_tt, tail } = try_pop_no_fluff(&pcx, line).await?;
|
|
||||||
expect_end(&pcx, tail).await?;
|
|
||||||
if exported {
|
|
||||||
return Err(mk_errv(
|
|
||||||
i().i("Exported internal line").await,
|
|
||||||
"as_proto cannot be exported, the type shares the enclosing module's visibility",
|
|
||||||
[line.sr().pos()],
|
|
||||||
));
|
|
||||||
}
|
|
||||||
let mut lines = Vec::new();
|
|
||||||
let mut impls = Vec::new();
|
|
||||||
parse_impls(&pcx, &mut lines, &mut impls, body_tt).await?;
|
|
||||||
let id = pcx.module();
|
|
||||||
let proto_tag_name = i().i("__protocol_tag__").await;
|
|
||||||
let proto_tag_path = id.suffix([proto_tag_name.clone()], &i()).await;
|
|
||||||
lines.push(ParsedLine::cnst(&line.sr(), &cmts, true, proto_tag_name, async |_ccx| {
|
|
||||||
exec(async move |mut h| {
|
|
||||||
let mut new_impls = HashMap::new();
|
|
||||||
for (k, v) in impls {
|
|
||||||
new_impls.insert(k.clone(), h.register(sym_ref(id.suffix([v], &i()).await)).await);
|
|
||||||
}
|
|
||||||
Tag { id, impls: Rc::new(new_impls) }
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
}));
|
|
||||||
lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("resolve").await, async move |_| {
|
|
||||||
call(sym_ref(sym!(std::protocol::resolve; i())), [sym_ref(proto_tag_path)])
|
|
||||||
}));
|
|
||||||
Ok(lines)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct ProtoParser;
|
|
||||||
impl Parser for ProtoParser {
|
|
||||||
const LINE_HEAD: &'static str = "proto";
|
|
||||||
async fn parse<'a>(
|
|
||||||
ctx: ParsCtx<'a>,
|
|
||||||
exported: bool,
|
|
||||||
cmts: Vec<Comment>,
|
|
||||||
line: PSnippet<'a>,
|
|
||||||
) -> OrcRes<Vec<ParsedLine>> {
|
|
||||||
let Parsed { output: name_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
|
|
||||||
let Token::Name(name) = &name_tt.tok else {
|
|
||||||
return Err(mk_errv(i().i("missing name for type").await, "A type needs a name", [name_tt
|
|
||||||
.sr()
|
|
||||||
.pos()]));
|
|
||||||
};
|
|
||||||
let lines = AsProtoParser::parse(ctx, false, cmts.clone(), tail).await?;
|
|
||||||
Ok(vec![ParsedLine::module(&line.sr(), &cmts, exported, name, true, lines)])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,82 +0,0 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use hashbrown::HashMap;
|
|
||||||
use orchid_base::error::{OrcRes, mk_errv};
|
|
||||||
use orchid_base::parse::{Comment, Parsed, expect_end, try_pop_no_fluff};
|
|
||||||
use orchid_base::sym;
|
|
||||||
use orchid_base::tree::Token;
|
|
||||||
use orchid_extension::context::i;
|
|
||||||
use orchid_extension::coroutine_exec::exec;
|
|
||||||
use orchid_extension::gen_expr::{call, sym_ref};
|
|
||||||
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
|
|
||||||
|
|
||||||
use crate::std::protocol::parse_impls::parse_impls;
|
|
||||||
use crate::std::protocol::types::Tag;
|
|
||||||
|
|
||||||
/// Parser for the `as_type` line head: the module-internal type body form
/// (see also `type`, which wraps this in a named module).
#[derive(Default)]
pub struct AsTypeParser;
|
|
||||||
impl Parser for AsTypeParser {
  const LINE_HEAD: &'static str = "as_type";

  /// Parse the body of an `as_type` line.
  ///
  /// Emits the lines produced by `parse_impls` plus three extra constants:
  /// * `__type_tag__` — a [`Tag`] whose impl table maps each parsed key to a
  ///   registered reference to the sibling constant generated for it.
  /// * `wrap` / `unwrap` — apply `std::protocol::wrap` / `std::protocol::unwrap`
  ///   to the tag.
  ///
  /// # Errors
  /// * if the body is missing, or if anything follows the body token tree
  /// * if the line is marked `export`: the generated items share the enclosing
  ///   module's visibility, so an explicit export is rejected.
  async fn parse<'a>(
    ctx: ParsCtx<'a>,
    exported: bool,
    cmts: Vec<Comment>,
    line: PSnippet<'a>,
  ) -> OrcRes<Vec<ParsedLine>> {
    // The whole body must be a single token tree; anything after it is an error.
    let Parsed { output: body_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
    expect_end(&ctx, tail).await?;
    if exported {
      return Err(mk_errv(
        i().i("Exported internal line").await,
        "as_type cannot be exported, the type shares the enclosing module's visibility",
        [line.sr().pos()],
      ));
    }
    let mut lines = Vec::new();
    let mut impls = Vec::new();
    parse_impls(&ctx, &mut lines, &mut impls, body_tt).await?;
    let id = ctx.module();
    let type_tag_name = i().i("__type_tag__").await;
    let type_tag_path = id.suffix([type_tag_name.clone()], &i()).await;
    // The tag constant is emitted as exported (true) so `wrap`/`unwrap` below
    // can reference it by path. Its value is built inside `exec` because
    // registering the impl references requires the coroutine handle `h`.
    lines.push(ParsedLine::cnst(&line.sr(), &cmts, true, type_tag_name, async |_ccx| {
      exec(async move |mut h| {
        let mut new_impls = HashMap::new();
        for (k, v) in impls {
          new_impls.insert(k.clone(), h.register(sym_ref(id.suffix([v], &i()).await)).await);
        }
        Tag { id, impls: Rc::new(new_impls) }
      })
      .await
    }));
    // Each closure below moves its own clone of the tag path.
    let type_tag_path_1 = type_tag_path.clone();
    lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("wrap").await, async move |_ccx| {
      call(sym_ref(sym!(std::protocol::wrap; i())), [sym_ref(type_tag_path_1)])
    }));
    let type_tag_path_1 = type_tag_path.clone();
    lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("unwrap").await, async move |_ccx| {
      call(sym_ref(sym!(std::protocol::unwrap; i())), [sym_ref(type_tag_path_1)])
    }));
    Ok(lines)
  }
}
|
|
||||||
|
|
||||||
/// Parser for the `type` line head: a named type declaration whose body is
/// handled by [`AsTypeParser`] and wrapped in a module.
#[derive(Default)]
pub struct TypeParser;
|
|
||||||
impl Parser for TypeParser {
|
|
||||||
const LINE_HEAD: &'static str = "type";
|
|
||||||
async fn parse<'a>(
|
|
||||||
ctx: ParsCtx<'a>,
|
|
||||||
exported: bool,
|
|
||||||
cmts: Vec<Comment>,
|
|
||||||
line: PSnippet<'a>,
|
|
||||||
) -> OrcRes<Vec<ParsedLine>> {
|
|
||||||
let Parsed { output: name_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
|
|
||||||
let Token::Name(name) = &name_tt.tok else {
|
|
||||||
return Err(mk_errv(i().i("missing name for type").await, "A type needs a name", [name_tt
|
|
||||||
.sr()
|
|
||||||
.pos()]));
|
|
||||||
};
|
|
||||||
let lines = AsTypeParser::parse(ctx, false, cmts.clone(), tail).await?;
|
|
||||||
Ok(vec![ParsedLine::module(&line.sr(), &cmts, exported, name, true, lines)])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user