5 Commits

239 changed files with 3528 additions and 16773 deletions

Cargo.lock (generated)

@@ -619,6 +619,17 @@ dependencies = [
 "pin-project-lite",
]
+
+[[package]]
+name = "futures-locks"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06"
+dependencies = [
+ "futures-channel",
+ "futures-task",
+ "tokio",
+]

[[package]]
name = "futures-macro"
version = "0.3.31"
@@ -798,9 +809,9 @@ dependencies = [
[[package]]
name = "konst"
-version = "0.4.1"
+version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1b7495a4af30134f36ab2018716ba98b092019a6c5dc2126b94e3241c170748"
+checksum = "64896bdfd7906cfb0b57bc04f08bde408bcd6aaf71ff438ee471061cd16f2e86"
dependencies = [
 "const_panic",
 "konst_proc_macros",
@@ -1029,11 +1040,11 @@ name = "orchid-extension"
version = "0.1.0"
dependencies = [
 "async-fn-stream",
- "async-lock",
 "async-once-cell",
 "derive_destructure",
 "dyn-clone",
 "futures",
+ "futures-locks",
 "hashbrown 0.16.0",
 "include_dir",
 "itertools",
@@ -1050,6 +1061,7 @@ dependencies = [
 "pastey",
 "some_executor",
 "substack",
+ "task-local",
 "tokio",
 "tokio-util",
 "trait-set",
@@ -1060,12 +1072,12 @@ name = "orchid-host"
version = "0.1.0"
dependencies = [
 "async-fn-stream",
- "async-lock",
 "async-once-cell",
 "async-process",
 "bound",
 "derive_destructure",
 "futures",
+ "futures-locks",
 "hashbrown 0.16.0",
 "itertools",
 "lazy_static",
@@ -1086,6 +1098,7 @@ dependencies = [
name = "orchid-std"
version = "0.1.0"
dependencies = [
+ "async-fn-stream",
 "async-once-cell",
 "futures",
 "hashbrown 0.16.0",
@@ -1098,7 +1111,9 @@ dependencies = [
 "orchid-base",
 "orchid-extension",
 "ordered-float",
+ "pastey",
 "rust_decimal",
+ "subslice-offset",
 "substack",
 "test_executors",
 "tokio",
@@ -1431,9 +1446,9 @@ dependencies = [
[[package]]
name = "rust_decimal"
-version = "1.37.2"
+version = "1.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b203a6425500a03e0919c42d3c47caca51e79f1132046626d2c8871c5092035d"
+checksum = "c8975fc98059f365204d635119cf9c5a60ae67b841ed49b5422a9a7e56cdfac0"
dependencies = [
 "arrayvec",
 "borsh",
@@ -1620,6 +1635,12 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
+
+[[package]]
+name = "subslice-offset"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c883fb2521558a8be70f0f1922babf736f9f72dfbe6ae4f397de3aefb74627ec"

[[package]]
name = "substack"
version = "1.1.1"
@@ -1665,6 +1686,15 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
+
+[[package]]
+name = "task-local"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2c821daee0efdf6414970c8185a1c22e259a7ed87b2fd9f7d3c5f5503fd2863"
+dependencies = [
+ "pin-project-lite",
+]

[[package]]
name = "test_executors"
version = "0.3.5"

LICENCE (new file)

@@ -0,0 +1,11 @@
THIS SOFTWARE IS PROVIDED WITHOUT WARRANTY
The code in this repository is free for noncommercial use, including derivative works and inclusion in other software if those are also free for noncommercial use. Commercial use, or inclusion in any derivative works licensed for commercial use is forbidden under this general licence.
Identifying marks stored in the repository are restricted for use with an unmodified copy of this software. If you distribute modified versions of this software, you must either replace these identifying marks or modify them in a way that clearly indicates that what you are distributing is a derivative work and not this official version. You must also replace any contact information in such a way that your derivative work does not suggest that we may be contacted about issues. Your derivative work may use the original identifying marks and contact information to identify this project as its basis, while emphasizing that the authors of the original project are neither in control of, nor liable for the derivative work.
Identifying marks include the Orchid logo, the ribbon image above, and the names "Orchid", "Orchidlang" unless they are part of a technical interface.
Contact information includes email addresses, links to the source code and issue tracker.
Words listed as identifying marks are explicitly not considered as such when they appear in technical interfaces or APIs. For example, shell commands, identifiers within Orchid or Rust code, and names in package registries are not considered as identifying marks.

LICENSE (deleted)

@@ -1,674 +0,0 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.


@@ -7,7 +7,7 @@ An experimental lazy, pure functional programming language designed to be embedd
## Usage
-The standalone interpreter can be built as the binary target from this package. The language tutorial and standard library documentation is at [www.lbfalvy.com/orchid-reference](https://lbfalvy.github.io/orchid-reference/). Embedder guide and Rust API documentation are coming soon.
+Updated language tutorial, standard library documentation, embedder guide and Rust API documentation coming soon.
## Design
@@ -19,10 +19,10 @@ Namespaces are inspired by Rust modules and ES6. Every file and directory is imp
## Try it out
-The project uses the nighly rust toolchain. Go to one of the folders within `examples` and run
+The project uses both the stable and nightly rust toolchain. Run the examples with
```sh
-cargo run --release
+cargo orcx -- exec --proj ./examples/hello-world "src::main::main"
```
you can try modifying the examples, but error reporting for the time being is pretty terrible.
@@ -35,12 +35,16 @@ Orchids and mangrove trees form complex ecosystems; The flowers persuade the tre
All contributions are welcome. For the time being, use the issue tracker to discuss ideas.
-## Forks
-The code in this repository is available under the GNU GPLv3, but identifying marks stored in the repository are restricted for use with an unmodified copy of this software. If you distribute modified versions of this software, you must either replace these identifying marks or modify them in a way that clearly indicates that what you are distributing is a derivative work and not this official vversion. You must also replace any contact information in such a way that your derivative work does not suggest that we may be contacted about issues. Your derivative work may use the original identifying marks and contact information to identify this project as its basis, while emphasizing that the authors of the original project are neither in control of, nor liable for the derivative work.
-Identifying marks include the Orchid logo, the ribbon image above, and the names "Orchid", "Orchidlang" unless they are part of a technical interface.
-Contact information includes email addresses, links to the source code and issue tracker.
-Words listed as identifying marks are explicltly not considered as such when they appear in technical interfaces or APIs. For example, shell commands, identifiers within Orchid or Rust code, and names in package registries are not considered as identifying marks.
+Unless we agree on different terms, by contributing to this software you declare that you have created or otherwise have the right to license your contribution, agree to license it publicly under the general noncommercial licence included in this repository, and grant me (the owner of the project) a permanent, unrestricted license to use, modify, distribute and relicense your contribution.
+You retain ownership of your intellectual property to ensure that the copyleft protections cementing the noncommercial availability of the code are preserved.
+## About the license
+This software is free for noncommercial use. If you would like to use it for commercial purposes, or distribute your derivative work under a license that permits commercial use, contact me for a separate license. These licences are provided on a case-by-case basis with any limitations and compensation we agree on.
+I generally appreciate the ethos of free software, and particularly the patterns used in copyleft to cement the guarantees of the licence. However, I don't think commercial entities fit that ethos, and I think they should be addressed separately rather than attempting to ignore the inherent unfairness towards contributors.
+My intent with the custom license included in this project is to enable the strong guarantees of copyleft towards noncommercial users, while leaving commercial users to engage with this project and its possible future ecosystem in a commercial way; if you intend to profit off my work, the barest cash flow should justify shooting me an email and agreeing on a simple temporary profit sharing deal until you figure out your business model, and the cash flow of a full scale business should more than justify dedicated attention to the software you rely on.
+The clause about identifying marks is intended to prevent another pitfall of open-source, wherein Linux distros borrow entire codebases, break them, and then distribute the result under the original author's name. If you would like to package Orchid, I'd be delighted if you would talk to me about making it official, but if you would rather operate independently, you should present your project as the rogue derivative work that it is rather than borrowing the original project's identity for something its owner has no control over.


@@ -1,114 +1,44 @@
-use std::cell::Cell;
-use std::future::poll_fn;
use std::marker::PhantomData;
-use std::pin::Pin;
-use std::ptr;
-use std::task::{Context, Poll};
-use futures::future::LocalBoxFuture;
-use futures::{FutureExt, Stream};
+use futures::channel::mpsc;
+use futures::stream::{PollNext, select_with_strategy};
+use futures::{FutureExt, SinkExt, Stream, StreamExt};
-type YieldSlot<'a, T> = &'a Cell<Option<T>>;
/// Handle that allows you to emit values on a stream. If you drop
/// this, the stream will end and you will not be polled again.
-pub struct StreamCtx<'a, T>(&'a Cell<Option<T>>, PhantomData<&'a ()>);
+pub struct StreamCtx<'a, T>(mpsc::Sender<T>, PhantomData<&'a ()>);
impl<T> StreamCtx<'_, T> {
-  pub fn emit(&mut self, value: T) -> impl Future<Output = ()> {
-    assert!(self.0.replace(Some(value)).is_none(), "Leftover value in stream");
-    let mut state = Poll::Pending;
-    poll_fn(move |_| std::mem::replace(&mut state, Poll::Ready(())))
+  pub async fn emit(&mut self, value: T) {
+    (self.0.send(value).await)
+      .expect("Dropped a stream receiver without dropping the driving closure");
  }
}
+fn left_strat(_: &mut ()) -> PollNext { PollNext::Left }
-enum FnOrFut<'a, T, O> {
-  Fn(Option<Box<dyn FnOnce(YieldSlot<'a, T>) -> LocalBoxFuture<'a, O> + 'a>>),
-  Fut(LocalBoxFuture<'a, O>),
-}
-struct AsyncFnStream<'a, T> {
-  driver: FnOrFut<'a, T, ()>,
-  output: Cell<Option<T>>,
-}
-impl<'a, T> Stream for AsyncFnStream<'a, T> {
-  type Item = T;
-  fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
-    unsafe {
-      let self_mut = self.get_unchecked_mut();
-      let fut = match &mut self_mut.driver {
-        FnOrFut::Fut(fut) => fut,
-        FnOrFut::Fn(f) => {
-          // safety: the cell is held inline in self, which is pinned.
-          let cell = ptr::from_ref(&self_mut.output).as_ref().unwrap();
-          let fut = f.take().unwrap()(cell);
-          self_mut.driver = FnOrFut::Fut(fut);
-          return Pin::new_unchecked(self_mut).poll_next(cx);
-        },
-      };
-      match fut.as_mut().poll(cx) {
-        Poll::Ready(()) => Poll::Ready(None),
-        Poll::Pending => match self_mut.output.replace(None) {
-          None => Poll::Pending,
-          Some(t) => Poll::Ready(Some(t)),
-        },
-      }
-    }
-  }
-}
-struct AsyncFnTryStream<'a, T, E> {
-  driver: FnOrFut<'a, T, Result<StreamCtx<'a, T>, E>>,
-  output: Cell<Option<T>>,
-}
-impl<'a, T, E> Stream for AsyncFnTryStream<'a, T, E> {
-  type Item = Result<T, E>;
-  fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
-    unsafe {
-      let self_mut = self.get_unchecked_mut();
-      let fut = match &mut self_mut.driver {
-        FnOrFut::Fut(fut) => fut,
-        FnOrFut::Fn(f) => {
-          // safety: the cell is held inline in self, which is pinned.
-          let cell = ptr::from_ref(&self_mut.output).as_ref().unwrap();
-          let fut = f.take().unwrap()(cell);
-          self_mut.driver = FnOrFut::Fut(fut);
-          return Pin::new_unchecked(self_mut).poll_next(cx);
-        },
-      };
-      match fut.as_mut().poll(cx) {
-        Poll::Ready(Ok(_)) => Poll::Ready(None),
-        Poll::Ready(Err(ex)) => Poll::Ready(Some(Err(ex))),
-        Poll::Pending => match self_mut.output.replace(None) {
-          None => Poll::Pending,
-          Some(t) => Poll::Ready(Some(Ok(t))),
-        },
-      }
-    }
-  }
-}
/// Create a stream from an async function acting as a coroutine
pub fn stream<'a, T: 'a>(
  f: impl for<'b> AsyncFnOnce(StreamCtx<'b, T>) + 'a,
) -> impl Stream<Item = T> + 'a {
-  AsyncFnStream {
-    output: Cell::new(None),
-    driver: FnOrFut::Fn(Some(Box::new(|t| {
-      async { f(StreamCtx(t, PhantomData)).await }.boxed_local()
-    }))),
-  }
+  let (send, recv) = mpsc::channel::<T>(1);
+  let fut = async { f(StreamCtx(send, PhantomData)).await };
+  // use options to ensure that the stream is driven to exhaustion
+  select_with_strategy(fut.into_stream().map(|()| None), recv.map(|t| Some(t)), left_strat)
+    .filter_map(async |opt| opt)
}
/// Create a stream of result from a fallible function.
pub fn try_stream<'a, T: 'a, E: 'a>(
  f: impl for<'b> AsyncFnOnce(StreamCtx<'b, T>) -> Result<StreamCtx<'b, T>, E> + 'a,
) -> impl Stream<Item = Result<T, E>> + 'a {
-  AsyncFnTryStream {
-    output: Cell::new(None),
-    driver: FnOrFut::Fn(Some(Box::new(|t| {
-      async { f(StreamCtx(t, PhantomData)).await }.boxed_local()
-    }))),
-  }
+  let (send, recv) = mpsc::channel::<T>(1);
+  let fut = async { f(StreamCtx(send, PhantomData)).await };
+  select_with_strategy(
+    fut.into_stream().map(|res| if let Err(e) = res { Some(Err(e)) } else { None }),
+    recv.map(|t| Some(Ok(t))),
+    left_strat,
+  )
+  .filter_map(async |opt| opt)
}
#[cfg(test)]
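
A rough usage sketch of the rewritten coroutine stream (assuming the items above are in scope and that an executor such as `futures::executor::block_on` is available; only `stream` and `StreamCtx::emit` come from this module):

```rust
use futures::StreamExt;
use futures::executor::block_on; // assumption: any single-threaded executor works

fn demo() {
  let collected: Vec<i32> = block_on(
    // `stream` turns the async closure into a Stream; `emit` waits for channel
    // capacity, so the producer stays roughly in lock-step with the consumer.
    stream(async |mut ctx| {
      for i in 0..3 {
        ctx.emit(i).await;
      }
      // returning drops `ctx`, which closes the channel and ends the stream
    })
    .collect(),
  );
  assert_eq!(collected, vec![0, 1, 2]);
}
```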


@@ -1,2 +1,2 @@
-let user = "dave"
-let main = println "Hello $user!" exit_status::success
+let my_tuple = option::some t[1, 2]
+let main = tuple::get (option::expect my_tuple "tuple is none") 1


@@ -4,7 +4,7 @@ use std::future::Future;
use super::coding::Coding;
use crate::helpers::enc_vec;
-pub trait Request: fmt::Debug + Coding + Sized + 'static {
+pub trait Request: fmt::Debug + Sized + 'static {
  type Response: fmt::Debug + Coding + 'static;
}


@@ -13,4 +13,4 @@ futures = { version = "0.3.31", features = ["std"], default-features = false }
itertools = "0.14.0"
[dev-dependencies]
-test_executors = "0.3.2"
+test_executors = "0.3.5"


@@ -43,17 +43,6 @@ pub struct Acquire(pub SysId, pub ExprTicket);
#[extends(ExprNotif, ExtHostNotif)]
pub struct Release(pub SysId, pub ExprTicket);
-/// Decrement the reference count for one system and increment it for another,
-/// to indicate passing an owned reference. Equivalent to [Acquire] followed by
-/// [Release].
-#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
-#[extends(ExprNotif, ExtHostNotif)]
-pub struct Move {
-  pub dec: SysId,
-  pub inc: SysId,
-  pub expr: ExprTicket,
-}
/// A description of a new expression. It is used as the return value of
/// [crate::atom::Call] or [crate::atom::CallRef], or a constant in the
/// [crate::tree::Tree].
@@ -67,8 +56,9 @@ pub enum ExpressionKind {
  /// template
  Arg(u64),
  /// Insert the specified host-expression in the template here. When the clause
-  /// is used in the const tree, this variant is forbidden.
-  Slot { tk: ExprTicket, by_value: bool },
+  /// is used in the const tree, this variant is forbidden. The ticket held
+  /// within is always owning. To avoid a leak, it must be deserialized.
+  Slot(ExprTicket),
  /// The lhs must be fully processed before the rhs can be processed.
  /// Equivalent to Haskell's function of the same name
  Seq(Box<Expression>, Box<Expression>),
@@ -115,11 +105,12 @@ impl Request for Inspect {
  type Response = Inspected;
}
-#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
+#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)]
#[extendable]
pub enum ExprReq {
  Inspect(Inspect),
+  Create(Create),
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
@@ -128,5 +119,11 @@ pub enum ExprReq {
pub enum ExprNotif {
  Acquire(Acquire),
  Release(Release),
-  Move(Move),
}
+#[derive(Clone, Debug, Coding, Hierarchy)]
+#[extends(ExprReq, ExtHostReq)]
+pub struct Create(pub Expression);
+impl Request for Create {
+  type Response = ExprTicket;
+}
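
With `Move` gone, the owned-reference handoff it described can be spelled out as the `Acquire`/`Release` pair the removed doc comment declared it equivalent to. A sketch of that pattern (the `notify` callback is a placeholder for whatever notification channel the caller actually has; the types are the ones defined in this file):

```rust
fn transfer_owned(
  notify: &mut impl FnMut(ExprNotif),
  from: SysId,
  to: SysId,
  expr: ExprTicket,
) {
  // Acquire for the new owner first so the count never drops to zero,
  // then release the old owner's claim.
  notify(ExprNotif::Acquire(Acquire(to, expr)));
  notify(ExprNotif::Release(Release(from, expr)));
}
```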


@@ -17,6 +17,8 @@ pub enum Location {
  Gen(CodeGenInfo),
  /// Range and file
  SourceRange(SourceRange),
+  /// Multiple locations
+  Multi(Vec<Location>),
}

#[derive(Clone, Debug, Coding)]


@@ -1,4 +1,5 @@
use std::collections::HashMap;
+use std::fmt;
use std::num::NonZeroU64;
use std::ops::Range;
use std::rc::Rc;
@@ -56,6 +57,15 @@ pub enum Paren {
  Square,
  Curly,
}
+impl fmt::Display for Paren {
+  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+    write!(f, "{}", match self {
+      Self::Round => "()",
+      Self::Curly => "{}",
+      Self::Square => "[]",
+    })
+  }
+}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct TreeId(pub NonZeroU64);
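
A trivial sketch of the new `Display` impl in use (the diagnostic wording is invented; only `Paren` and the bracket strings it renders to come from the code above):

```rust
fn mismatched_paren_msg(expected: Paren, found: Paren) -> String {
  // e.g. "expected () but found []"
  format!("expected {expected} but found {found}")
}
```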


@@ -1,3 +1,4 @@
use std::borrow::Borrow;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::convert::Infallible; use std::convert::Infallible;
use std::future::Future; use std::future::Future;
@@ -5,7 +6,8 @@ use std::iter;
use std::rc::Rc; use std::rc::Rc;
use std::str::FromStr; use std::str::FromStr;
use itertools::Itertools; use futures::future::join_all;
use itertools::{Itertools, chain};
use never::Never; use never::Never;
use regex::Regex; use regex::Regex;
@@ -45,12 +47,14 @@ impl FmtUnit {
} }
} }
pub fn sequence( pub fn sequence(
head: &str,
delim: &str, delim: &str,
tail: &str,
seq_bnd: Option<bool>, seq_bnd: Option<bool>,
seq: impl IntoIterator<Item = FmtUnit>, seq: impl IntoIterator<Item = FmtUnit>,
) -> Self { ) -> Self {
let items = seq.into_iter().collect_vec(); let items = seq.into_iter().collect_vec();
FmtUnit::new(Variants::sequence(items.len(), delim, seq_bnd), items) Variants::default().sequence(items.len(), head, delim, tail, seq_bnd).units_own(items)
} }
} }
impl<T> From<T> for FmtUnit impl<T> From<T> for FmtUnit
@@ -77,9 +81,12 @@ impl FmtElement {
pub fn bounded(i: u32) -> Self { Self::sub(i, Some(true)) } pub fn bounded(i: u32) -> Self { Self::sub(i, Some(true)) }
pub fn unbounded(i: u32) -> Self { Self::sub(i, Some(false)) } pub fn unbounded(i: u32) -> Self { Self::sub(i, Some(false)) }
pub fn last(i: u32) -> Self { Self::sub(i, None) } pub fn last(i: u32) -> Self { Self::sub(i, None) }
pub fn sequence(len: usize, bounded: Option<bool>) -> impl Iterator<Item = Self> { pub fn sequence(len: usize, bounded: Option<bool>) -> Vec<Self> {
let len32: u32 = len.try_into().unwrap(); match len.try_into().unwrap() {
(0..len32 - 1).map(FmtElement::unbounded).chain([FmtElement::sub(len32 - 1, bounded)]) 0u32 => vec![],
1u32 => vec![FmtElement::sub(0, bounded)],
n => (0..n - 1).map(FmtElement::unbounded).chain([FmtElement::sub(n - 1, bounded)]).collect(),
}
} }
pub fn from_api(api: &api::FormattingElement) -> Self { pub fn from_api(api: &api::FormattingElement) -> Self {
match_mapping!(api, api::FormattingElement => FmtElement { match_mapping!(api, api::FormattingElement => FmtElement {
@@ -105,10 +112,38 @@ pub struct Variant {
#[test] #[test]
fn variants_parse_test() { fn variants_parse_test() {
let vars = Variants::default().bounded("({0})"); let vars = Rc::new(Variants::default().bounded("({{{0}}})"));
println!("final: {vars:?}") let expected_vars = Rc::new(Variants(vec![Variant {
bounded: true,
elements: vec![
FmtElement::String(Rc::new("({".to_string())),
FmtElement::Sub { bounded: Some(false), slot: 0 },
FmtElement::String(Rc::new("})".to_string())),
],
}]));
assert_eq!(vars.as_ref(), expected_vars.as_ref());
let unit = vars.units(["1".into()]);
assert_eq!(unit, FmtUnit {
subs: vec![FmtUnit {
subs: vec![],
variants: Rc::new(Variants(vec![Variant {
bounded: true,
elements: vec![FmtElement::String(Rc::new("1".to_string()))]
}]))
}],
variants: expected_vars
});
let str = take_first(&unit, true);
assert_eq!(str, "({1})");
} }
/// Represents a collection of formatting strings for the same set of parameters
/// from which the formatter can choose within their associated constraints.
///
/// - {0b} can be replaced by any variant of the parameter.
/// - {0} can only be replaced by a bounded variant of the parameter.
/// - {0l} causes the current end restriction to be applied to the parameter.
/// This is to be used if the parameter is at the very end of the variant.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Default)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Default)]
pub struct Variants(pub Vec<Variant>); pub struct Variants(pub Vec<Variant>);
impl Variants { impl Variants {
@@ -183,20 +218,40 @@ impl Variants {
fn add(&mut self, bounded: bool, s: &'_ str) { fn add(&mut self, bounded: bool, s: &'_ str) {
self.0.push(Variant { bounded, elements: Self::parse(s) }) self.0.push(Variant { bounded, elements: Self::parse(s) })
} }
// This option is available in all positions /// This option is available in all positions.
/// See [Variants] for a description of the format strings
pub fn bounded(mut self, s: &'_ str) -> Self { pub fn bounded(mut self, s: &'_ str) -> Self {
self.add(true, s); self.add(true, s);
self self
} }
// This option is only available in positions immediately preceding the end of /// This option is only available in positions immediately preceding the end
// the sequence or a parenthesized subsequence. /// of the sequence or a parenthesized subsequence.
/// See [Variants] for a description of the format strings
pub fn unbounded(mut self, s: &'_ str) -> Self { pub fn unbounded(mut self, s: &'_ str) -> Self {
self.add(false, s); self.add(false, s);
self self
} }
pub fn sequence(len: usize, delim: &str, seq_bnd: Option<bool>) -> Rc<Self> { pub fn sequence(
let seq = Itertools::intersperse(FmtElement::sequence(len, seq_bnd), FmtElement::str(delim)); mut self,
Rc::new(Variants(vec![Variant { bounded: true, elements: seq.collect_vec() }])) len: usize,
head: &str,
delim: &str,
tail: &str,
seq_bnd: Option<bool>,
) -> Self {
let seq = chain!(
[FmtElement::str(head)],
Itertools::intersperse(
FmtElement::sequence(len, seq_bnd).into_iter(),
FmtElement::str(delim),
),
[FmtElement::str(tail)],
);
self.0.push(Variant { bounded: true, elements: seq.collect_vec() });
self
}
pub fn units_own(self, subs: impl IntoIterator<Item = FmtUnit>) -> FmtUnit {
FmtUnit::new(Rc::new(self), subs)
} }
pub fn units(self: &Rc<Self>, subs: impl IntoIterator<Item = FmtUnit>) -> FmtUnit { pub fn units(self: &Rc<Self>, subs: impl IntoIterator<Item = FmtUnit>) -> FmtUnit {
FmtUnit::new(self.clone(), subs) FmtUnit::new(self.clone(), subs)
@@ -278,3 +333,12 @@ impl Format for Never {
/// Format with default strategy. Currently equal to [take_first_fmt] /// Format with default strategy. Currently equal to [take_first_fmt]
pub async fn fmt(v: &(impl Format + ?Sized), i: &Interner) -> String { take_first_fmt(v, i).await } pub async fn fmt(v: &(impl Format + ?Sized), i: &Interner) -> String { take_first_fmt(v, i).await }
/// Format a sequence with default strategy. Currently equal to [take_first_fmt]
pub async fn fmt_v<F: Format + ?Sized, R: Borrow<F>>(
v: impl IntoIterator<Item = R>,
i: &Interner,
) -> impl Iterator<Item = String> {
join_all(v.into_iter().map(|f| async move { take_first_fmt(f.borrow(), i).await }))
.await
.into_iter()
}

View File

@@ -2,8 +2,9 @@
use std::fmt; use std::fmt;
use std::hash::Hash; use std::hash::Hash;
use std::ops::Range; use std::ops::{Add, AddAssign, Range};
use futures::future::join_all;
use trait_set::trait_set; use trait_set::trait_set;
use crate::error::ErrPos; use crate::error::ErrPos;
@@ -25,6 +26,7 @@ pub enum Pos {
Gen(CodeGenInfo), Gen(CodeGenInfo),
/// Range and file /// Range and file
SrcRange(SrcRange), SrcRange(SrcRange),
Multi(Vec<Pos>),
} }
impl Pos { impl Pos {
pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String { pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String {
@@ -39,6 +41,7 @@ impl Pos {
match_mapping!(api, api::Location => Pos { match_mapping!(api, api::Location => Pos {
None, Inherit, SlotTarget, None, Inherit, SlotTarget,
Gen(cgi => CodeGenInfo::from_api(cgi, i).await), Gen(cgi => CodeGenInfo::from_api(cgi, i).await),
Multi(v => join_all(v.iter().map(|l| Pos::from_api(l, i))).await)
} { } {
api::Location::SourceRange(sr) => Self::SrcRange(SrcRange::from_api(sr, i).await) api::Location::SourceRange(sr) => Self::SrcRange(SrcRange::from_api(sr, i).await)
}) })
@@ -47,6 +50,7 @@ impl Pos {
match_mapping!(self, Pos => api::Location { match_mapping!(self, Pos => api::Location {
None, Inherit, SlotTarget, None, Inherit, SlotTarget,
Gen(cgi.to_api()), Gen(cgi.to_api()),
Multi(v => v.iter().map(|pos| pos.to_api()).collect()),
} { } {
Self::SrcRange(sr) => api::Location::SourceRange(sr.to_api()), Self::SrcRange(sr) => api::Location::SourceRange(sr.to_api()),
}) })
@@ -60,7 +64,34 @@ impl fmt::Display for Pos {
Pos::None => f.write_str("N/A"), Pos::None => f.write_str("N/A"),
Pos::Gen(g) => write!(f, "{g}"), Pos::Gen(g) => write!(f, "{g}"),
Pos::SrcRange(sr) => write!(f, "{sr}"), Pos::SrcRange(sr) => write!(f, "{sr}"),
Pos::Multi(posv) => {
write!(f, "{}", posv[0])?;
for pos in posv.iter().skip(1) { // the first element was already written above
write!(f, "+{}", pos)?;
} }
Ok(())
},
}
}
}
impl Add for Pos {
type Output = Pos;
fn add(self, rhs: Self) -> Self::Output {
match (self, rhs) {
(Pos::Multi(l), Pos::Multi(r)) => Pos::Multi(l.into_iter().chain(r).collect()),
(Pos::None, any) => any,
(any, Pos::None) => any,
(Pos::Multi(v), single) => Pos::Multi(v.into_iter().chain([single]).collect()),
(single, Pos::Multi(v)) => Pos::Multi([single].into_iter().chain(v).collect()),
(l, r) => Pos::Multi(vec![l, r]),
}
}
}
impl AddAssign for Pos {
fn add_assign(&mut self, rhs: Self) {
let mut tmp = Pos::None;
std::mem::swap(&mut tmp, self);
*self = tmp + rhs;
} }
} }
@@ -77,7 +108,7 @@ impl SrcRange {
} }
/// Create a dud [SourceRange] for testing. Its value is unspecified and /// Create a dud [SourceRange] for testing. Its value is unspecified and
/// volatile. /// volatile.
pub async fn mock(i: &Interner) -> Self { Self { range: 0..1, path: sym!(test; i).await } } pub async fn mock(i: &Interner) -> Self { Self { range: 0..1, path: sym!(test; i) } }
/// Path the source text was loaded from /// Path the source text was loaded from
pub fn path(&self) -> Sym { self.path.clone() } pub fn path(&self) -> Sym { self.path.clone() }
/// Byte range /// Byte range
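`Pos` gained a `Multi` variant plus `Add`/`AddAssign`, so locations can be merged without special-casing. A small sketch of the resulting algebra, assuming only what the match arms above guarantee (`Pos::None` is the identity and nested `Multi`s are flattened):

```rust
// Sketch only: folding any number of positions into a single Pos.
use orchid_base::location::Pos;

fn merge_positions(positions: impl IntoIterator<Item = Pos>) -> Pos {
  // None + p == p and Multi + Multi concatenates, so the fold never nests
  // Multi inside Multi, and an empty input stays Pos::None.
  positions.into_iter().fold(Pos::None, |acc, pos| acc + pos)
}
```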

View File

@@ -311,7 +311,7 @@ impl NameLike for VName {}
/// cloning the token. /// cloning the token.
#[macro_export] #[macro_export]
macro_rules! sym { macro_rules! sym {
($seg1:tt $( :: $seg:tt)* ; $i:expr) => { async { ($seg1:tt $( :: $seg:tt)* ; $i:expr) => {
$crate::name::Sym::from_tok( $crate::name::Sym::from_tok(
$i.i(&[ $i.i(&[
$i.i(stringify!($seg1)).await $i.i(stringify!($seg1)).await
@@ -319,9 +319,7 @@ macro_rules! sym {
]) ])
.await .await
).unwrap() ).unwrap()
}
}; };
(@NAME $seg:tt) => {}
} }
/// Create a [VName] literal. /// Create a [VName] literal.
@@ -329,12 +327,12 @@ macro_rules! sym {
/// The components are interned much like in [sym]. /// The components are interned much like in [sym].
#[macro_export] #[macro_export]
macro_rules! vname { macro_rules! vname {
($seg1:tt $( :: $seg:tt)* ; $i:expr) => { async { ($seg1:tt $( :: $seg:tt)* ; $i:expr) => {
$crate::name::VName::new([ $crate::name::VName::new([
$i.i(stringify!($seg1)).await $i.i(stringify!($seg1)).await
$( , $i.i(stringify!($seg)).await )* $( , $i.i(stringify!($seg)).await )*
]).unwrap() ]).unwrap()
} }; };
} }
/// Create a [VPath] literal. /// Create a [VPath] literal.
@@ -342,12 +340,12 @@ macro_rules! vname {
/// The components are interned much like in [sym]. /// The components are interned much like in [sym].
#[macro_export] #[macro_export]
macro_rules! vpath { macro_rules! vpath {
($seg1:tt $( :: $seg:tt)+ ; $i:expr) => { async { ($seg1:tt $( :: $seg:tt)+ ; $i:expr) => {
$crate::name::VPath(vec![ $crate::name::VPath(vec![
$i.i(stringify!($seg1)).await $i.i(stringify!($seg1)).await
$( , $i.i(stringify!($seg)).await )+ $( , $i.i(stringify!($seg)).await )+
]) ])
} }; };
() => { () => {
$crate::name::VPath(vec![]) $crate::name::VPath(vec![])
} }
@@ -367,7 +365,7 @@ mod test {
fn recur() { fn recur() {
spin_on(async { spin_on(async {
let i = Interner::new_master(); let i = Interner::new_master();
let myname = vname!(foo::bar; i).await; let myname = vname!(foo::bar; i);
let _borrowed_slice: &[Tok<String>] = myname.borrow(); let _borrowed_slice: &[Tok<String>] = myname.borrow();
let _deref_pathslice: &[Tok<String>] = &myname; let _deref_pathslice: &[Tok<String>] = &myname;
let _as_slice_out: &[Tok<String>] = myname.as_slice(); let _as_slice_out: &[Tok<String>] = myname.as_slice();
@@ -379,15 +377,15 @@ mod test {
spin_on(async { spin_on(async {
let i = Interner::new_master(); let i = Interner::new_master();
assert_eq!( assert_eq!(
sym!(foo::bar::baz; i).await, sym!(foo::bar::baz; i),
Sym::new([i.i("foo").await, i.i("bar").await, i.i("baz").await], &i).await.unwrap() Sym::new([i.i("foo").await, i.i("bar").await, i.i("baz").await], &i).await.unwrap()
); );
assert_eq!( assert_eq!(
vname!(foo::bar::baz; i).await, vname!(foo::bar::baz; i),
VName::new([i.i("foo").await, i.i("bar").await, i.i("baz").await]).unwrap() VName::new([i.i("foo").await, i.i("bar").await, i.i("baz").await]).unwrap()
); );
assert_eq!( assert_eq!(
vpath!(foo::bar::baz; i).await, vpath!(foo::bar::baz; i),
VPath::new([i.i("foo").await, i.i("bar").await, i.i("baz").await]) VPath::new([i.i("foo").await, i.i("bar").await, i.i("baz").await])
); );
}) })

View File

@@ -145,7 +145,7 @@ impl<T: MsgSet> ReqNot<T> {
notif_cb(notif_val, self.clone()).await notif_cb(notif_val, self.clone()).await
} else if 0 < id.bitand(1 << 63) { } else if 0 < id.bitand(1 << 63) {
let mut sender = g.responses.remove(&!id).expect("Received response for invalid message"); let mut sender = g.responses.remove(&!id).expect("Received response for invalid message");
sender.send(message.to_vec()).await.unwrap() let _ = sender.send(message.to_vec()).await;
} else { } else {
let message = <T::In as Channel>::Req::decode(Pin::new(&mut &payload[..])).await; let message = <T::In as Channel>::Req::decode(Pin::new(&mut &payload[..])).await;
let mut req_cb = clone_box(&*g.req); let mut req_cb = clone_box(&*g.req);

View File

@@ -307,7 +307,7 @@ pub async fn ttv_fmt<'a: 'b, 'b>(
ttv: impl IntoIterator<Item = &'b TokTree<impl ExprRepr + 'a, impl ExtraTok + 'a>>, ttv: impl IntoIterator<Item = &'b TokTree<impl ExprRepr + 'a, impl ExtraTok + 'a>>,
c: &(impl FmtCtx + ?Sized), c: &(impl FmtCtx + ?Sized),
) -> FmtUnit { ) -> FmtUnit {
FmtUnit::sequence(" ", None, join_all(ttv.into_iter().map(|t| t.print(c))).await) FmtUnit::sequence("", " ", "", None, join_all(ttv.into_iter().map(|t| t.print(c))).await)
} }
pub fn indent(s: &str) -> String { s.replace("\n", "\n ") } pub fn indent(s: &str) -> String { s.replace("\n", "\n ") }

View File

@@ -7,7 +7,6 @@ edition = "2024"
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" } async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-lock = "3.4.1"
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
dyn-clone = "1.0.20" dyn-clone = "1.0.20"
@@ -15,10 +14,11 @@ futures = { version = "0.3.31", features = [
"std", "std",
"async-await", "async-await",
], default-features = false } ], default-features = false }
futures-locks = "0.7.1"
hashbrown = "0.16.0" hashbrown = "0.16.0"
include_dir = { version = "0.7.4", optional = true } include_dir = { version = "0.7.4", optional = true }
itertools = "0.14.0" itertools = "0.14.0"
konst = "0.4.1" konst = "0.4.2"
lazy_static = "1.5.0" lazy_static = "1.5.0"
memo-map = "0.3.3" memo-map = "0.3.3"
never = "0.1.0" never = "0.1.0"
@@ -31,6 +31,7 @@ ordered-float = "5.0.0"
pastey = "0.1.1" pastey = "0.1.1"
some_executor = "0.6.1" some_executor = "0.6.1"
substack = "1.1.1" substack = "1.1.1"
task-local = "0.1.0"
tokio = { version = "1.47.1", optional = true, features = [] } tokio = { version = "1.47.1", optional = true, features = [] }
tokio-util = { version = "0.7.16", optional = true, features = ["compat"] } tokio-util = { version = "0.7.16", optional = true, features = ["compat"] }

View File

@@ -12,21 +12,20 @@ use futures::future::LocalBoxFuture;
use futures::{AsyncRead, AsyncWrite, FutureExt, StreamExt, stream}; use futures::{AsyncRead, AsyncWrite, FutureExt, StreamExt, stream};
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec}; use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec};
use orchid_base::clone;
use orchid_base::error::{OrcErrv, OrcRes, mk_errv, mk_errv_floating}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv, mk_errv_floating};
use orchid_base::format::{FmtCtx, FmtUnit, Format}; use orchid_base::format::{FmtCtx, FmtUnit, Format, fmt};
use orchid_base::interner::Interner;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::Requester; use orchid_base::reqnot::Requester;
use trait_set::trait_set; use trait_set::trait_set;
use crate::api; use crate::api;
use crate::context::{ctx, i};
use crate::conv::ToExpr; use crate::conv::ToExpr;
// use crate::error::{ProjectError, ProjectResult}; // use crate::error::{ProjectError, ProjectResult};
use crate::expr::{Expr, ExprData, ExprHandle, ExprKind}; use crate::expr::{Expr, ExprData, ExprHandle, ExprKind};
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
use crate::system::{DynSystemCard, SysCtx, atom_info_for, downcast_atom}; use crate::system::{DynSystemCard, atom_info_for, downcast_atom};
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct AtomTypeId(pub NonZeroU32); pub struct AtomTypeId(pub NonZeroU32);
@@ -91,26 +90,25 @@ pub struct ForeignAtom {
} }
impl ForeignAtom { impl ForeignAtom {
pub fn pos(&self) -> Pos { self.pos.clone() } pub fn pos(&self) -> Pos { self.pos.clone() }
pub fn ctx(&self) -> &SysCtx { &self.expr.ctx }
pub fn ex(self) -> Expr { pub fn ex(self) -> Expr {
let (handle, pos) = (self.expr.clone(), self.pos.clone()); let (handle, pos) = (self.expr.clone(), self.pos.clone());
let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { ..self }) }; let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { ..self }) };
Expr::new(handle, data) Expr::from_data(handle, data)
} }
pub(crate) fn new(handle: Rc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self { pub(crate) fn new(handle: Rc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self {
ForeignAtom { atom, expr: handle, pos } ForeignAtom { atom, expr: handle, pos }
} }
pub async fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> { pub async fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
let rep = (self.ctx().reqnot().request(api::Fwd( let rep = (ctx().reqnot().request(api::Fwd(
self.atom.clone(), self.atom.clone(),
Sym::parse(M::NAME, self.ctx().i()).await.unwrap().tok().to_api(), Sym::parse(M::NAME, &i()).await.unwrap().tok().to_api(),
enc_vec(&m).await, enc_vec(&m).await,
))) )))
.await?; .await?;
Some(M::Response::decode(Pin::new(&mut &rep[..])).await) Some(M::Response::decode(Pin::new(&mut &rep[..])).await)
} }
pub async fn downcast<T: AtomicFeatures>(self) -> Result<TypAtom<T>, NotTypAtom> { pub async fn downcast<T: AtomicFeatures>(self) -> Result<TAtom<T>, NotTypAtom> {
TypAtom::downcast(self.ex().handle()).await TAtom::downcast(self.ex().handle()).await
} }
} }
impl fmt::Display for ForeignAtom { impl fmt::Display for ForeignAtom {
@@ -121,40 +119,38 @@ impl fmt::Debug for ForeignAtom {
} }
impl Format for ForeignAtom { impl Format for ForeignAtom {
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
FmtUnit::from_api(&self.ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await) FmtUnit::from_api(&ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await)
} }
} }
impl ToExpr for ForeignAtom { impl ToExpr for ForeignAtom {
async fn to_expr(self) -> GExpr { self.ex().to_expr().await } async fn to_gen(self) -> GExpr { self.ex().to_gen().await }
} }
pub struct NotTypAtom { pub struct NotTypAtom {
pub pos: Pos, pub pos: Pos,
pub expr: Expr, pub expr: Expr,
pub typ: Box<dyn AtomDynfo>, pub typ: Box<dyn AtomDynfo>,
pub ctx: SysCtx,
} }
impl NotTypAtom { impl NotTypAtom {
pub async fn mk_err(&self) -> OrcErrv { pub async fn mk_err(&self) -> OrcErrv {
mk_errv( mk_errv(
self.ctx.i().i("Not the expected type").await, i().i("Not the expected type").await,
format!("This expression is not a {}", self.typ.name()), format!("The expression {} is not a {}", fmt(&self.expr, &i()).await, self.typ.name()),
[self.pos.clone()], [self.pos.clone()],
) )
} }
} }
pub trait AtomMethod: Request { pub trait AtomMethod: Request + Coding {
const NAME: &str; const NAME: &str;
} }
pub trait Supports<M: AtomMethod>: AtomCard { pub trait Supports<M: AtomMethod>: AtomCard {
fn handle(&self, ctx: SysCtx, req: M) -> impl Future<Output = <M as Request>::Response>; fn handle(&self, req: M) -> impl Future<Output = <M as Request>::Response>;
} }
trait_set! { trait_set! {
trait AtomReqCb<A> = for<'a> Fn( trait AtomReqCb<A> = for<'a> Fn(
&'a A, &'a A,
SysCtx,
Pin<&'a mut dyn AsyncRead>, Pin<&'a mut dyn AsyncRead>,
Pin<&'a mut dyn AsyncWrite>, Pin<&'a mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, ()> ) -> LocalBoxFuture<'a, ()>
@@ -171,24 +167,18 @@ impl<A: AtomCard> MethodSetBuilder<A> {
assert!(!M::NAME.is_empty(), "AtomMethod::NAME cannot be empty"); assert!(!M::NAME.is_empty(), "AtomMethod::NAME cannot be empty");
self.handlers.push(( self.handlers.push((
M::NAME, M::NAME,
Rc::new( Rc::new(move |a: &A, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| {
move |a: &A, ctx: SysCtx, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| { async { Supports::<M>::handle(a, M::decode(req).await).await.encode(rep).await }
async { Supports::<M>::handle(a, ctx, M::decode(req).await).await.encode(rep).await }
.boxed_local() .boxed_local()
}, }),
),
)); ));
self self
} }
pub async fn pack(&self, ctx: SysCtx) -> MethodSet<A> { pub async fn pack(&self) -> MethodSet<A> {
MethodSet { MethodSet {
handlers: stream::iter(self.handlers.iter()) handlers: stream::iter(self.handlers.iter())
.then(|(k, v)| { .then(async |(k, v)| (Sym::parse(k, &i()).await.unwrap(), v.clone()))
clone!(ctx; async move {
(Sym::parse(k, ctx.i()).await.unwrap(), v.clone())
})
})
.collect() .collect()
.await, .await,
} }
@@ -202,7 +192,6 @@ impl<A: AtomCard> MethodSet<A> {
pub(crate) async fn dispatch<'a>( pub(crate) async fn dispatch<'a>(
&'a self, &'a self,
atom: &'a A, atom: &'a A,
ctx: SysCtx,
key: Sym, key: Sym,
req: Pin<&'a mut dyn AsyncRead>, req: Pin<&'a mut dyn AsyncRead>,
rep: Pin<&'a mut dyn AsyncWrite>, rep: Pin<&'a mut dyn AsyncWrite>,
@@ -210,7 +199,7 @@ impl<A: AtomCard> MethodSet<A> {
match self.handlers.get(&key) { match self.handlers.get(&key) {
None => false, None => false,
Some(handler) => { Some(handler) => {
handler(atom, ctx, req, rep).await; handler(atom, req, rep).await;
true true
}, },
} }
@@ -222,38 +211,29 @@ impl<A: AtomCard> Default for MethodSetBuilder<A> {
} }
#[derive(Clone)] #[derive(Clone)]
pub struct TypAtom<A: AtomicFeatures> { pub struct TAtom<A: AtomicFeatures> {
pub untyped: ForeignAtom, pub untyped: ForeignAtom,
pub value: A::Data, pub value: A::Data,
} }
impl<A: AtomicFeatures> TypAtom<A> { impl<A: AtomicFeatures> TAtom<A> {
pub fn ctx(&self) -> &SysCtx { self.untyped.ctx() } pub fn ex(&self) -> Expr { self.untyped.clone().ex() }
pub fn i(&self) -> &Interner { self.ctx().i() } pub fn pos(&self) -> Pos { self.untyped.pos() }
pub async fn downcast(expr: Rc<ExprHandle>) -> Result<Self, NotTypAtom> { pub async fn downcast(expr: Rc<ExprHandle>) -> Result<Self, NotTypAtom> {
match Expr::from_handle(expr).atom().await { match Expr::from_handle(expr).atom().await {
Err(expr) => Err(NotTypAtom { Err(expr) =>
ctx: expr.handle().get_ctx(), Err(NotTypAtom { pos: expr.data().await.pos.clone(), expr, typ: Box::new(A::info()) }),
pos: expr.data().await.pos.clone(),
expr,
typ: Box::new(A::info()),
}),
Ok(atm) => match downcast_atom::<A>(atm).await { Ok(atm) => match downcast_atom::<A>(atm).await {
Ok(tatom) => Ok(tatom), Ok(tatom) => Ok(tatom),
Err(fa) => Err(NotTypAtom { Err(fa) => Err(NotTypAtom { pos: fa.pos.clone(), expr: fa.ex(), typ: Box::new(A::info()) }),
pos: fa.pos.clone(),
ctx: fa.ctx().clone(),
expr: fa.ex(),
typ: Box::new(A::info()),
}),
}, },
} }
} }
pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response
where A: Supports<M> { where A: Supports<M> {
M::Response::decode(Pin::new( M::Response::decode(Pin::new(
&mut &(self.untyped.ctx().reqnot().request(api::Fwd( &mut &(ctx().reqnot().request(api::Fwd(
self.untyped.atom.clone(), self.untyped.atom.clone(),
Sym::parse(M::NAME, self.untyped.ctx().i()).await.unwrap().tok().to_api(), Sym::parse(M::NAME, &i()).await.unwrap().tok().to_api(),
enc_vec(&req).await, enc_vec(&req).await,
))) )))
.await .await
@@ -262,18 +242,20 @@ impl<A: AtomicFeatures> TypAtom<A> {
.await .await
} }
} }
impl<A: AtomicFeatures> Deref for TypAtom<A> { impl<A: AtomicFeatures> Deref for TAtom<A> {
type Target = A::Data; type Target = A::Data;
fn deref(&self) -> &Self::Target { &self.value } fn deref(&self) -> &Self::Target { &self.value }
} }
impl<A: AtomicFeatures> ToExpr for TypAtom<A> { impl<A: AtomicFeatures> ToExpr for TAtom<A> {
async fn to_expr(self) -> GExpr { self.untyped.to_expr().await } async fn to_gen(self) -> GExpr { self.untyped.to_gen().await }
}
impl<A: AtomicFeatures> Format for TAtom<A> {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
self.untyped.print(c).await
}
} }
pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>, pub SysCtx); pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>);
impl FmtCtx for AtomCtx<'_> {
fn i(&self) -> &Interner { self.2.i() }
}
pub trait AtomDynfo: 'static { pub trait AtomDynfo: 'static {
fn tid(&self) -> TypeId; fn tid(&self) -> TypeId;
@@ -295,24 +277,19 @@ pub trait AtomDynfo: 'static {
ctx: AtomCtx<'a>, ctx: AtomCtx<'a>,
write: Pin<&'b mut dyn AsyncWrite>, write: Pin<&'b mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>; ) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
fn deserialize<'a>( fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom>;
&'a self,
ctx: SysCtx,
data: &'a [u8],
refs: &'a [Expr],
) -> LocalBoxFuture<'a, api::Atom>;
fn drop<'a>(&'a self, ctx: AtomCtx<'a>) -> LocalBoxFuture<'a, ()>; fn drop<'a>(&'a self, ctx: AtomCtx<'a>) -> LocalBoxFuture<'a, ()>;
} }
trait_set! { trait_set! {
pub trait AtomFactoryFn = FnOnce(SysCtx) -> LocalBoxFuture<'static, api::Atom> + DynClone; pub trait AtomFactoryFn = FnOnce() -> LocalBoxFuture<'static, api::Atom> + DynClone;
} }
pub struct AtomFactory(Box<dyn AtomFactoryFn>); pub struct AtomFactory(Box<dyn AtomFactoryFn>);
impl AtomFactory { impl AtomFactory {
pub fn new(f: impl AsyncFnOnce(SysCtx) -> api::Atom + Clone + 'static) -> Self { pub fn new(f: impl AsyncFnOnce() -> api::Atom + Clone + 'static) -> Self {
Self(Box::new(|ctx| f(ctx).boxed_local())) Self(Box::new(|| f().boxed_local()))
} }
pub async fn build(self, ctx: SysCtx) -> api::Atom { (self.0)(ctx).await } pub async fn build(self) -> api::Atom { (self.0)().await }
} }
impl Clone for AtomFactory { impl Clone for AtomFactory {
fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) } fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) }
@@ -329,10 +306,10 @@ impl Format for AtomFactory {
} }
} }
pub async fn err_not_callable(i: &Interner) -> OrcErrv { pub async fn err_not_callable() -> OrcErrv {
mk_errv_floating(i.i("This atom is not callable").await, "Attempted to apply value as function") mk_errv_floating(i().i("This atom is not callable").await, "Attempted to apply value as function")
} }
pub async fn err_not_command(i: &Interner) -> OrcErrv { pub async fn err_not_command() -> OrcErrv {
mk_errv_floating(i.i("This atom is not a command").await, "Settled on an inactionable value") mk_errv_floating(i().i("This atom is not a command").await, "Settled on an inactionable value")
} }

View File

@@ -1,16 +1,17 @@
use std::any::{Any, TypeId, type_name}; use std::any::{Any, TypeId, type_name};
use std::borrow::Cow; use std::borrow::Cow;
use std::future::Future; use std::future::Future;
use std::marker::PhantomData;
use std::num::NonZero; use std::num::NonZero;
use std::ops::Deref; use std::ops::Deref;
use std::pin::Pin; use std::pin::Pin;
use std::sync::atomic::AtomicU64; use std::sync::atomic::AtomicU64;
use async_lock::{RwLock, RwLockReadGuard};
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use dyn_clone::{DynClone, clone_box}; use dyn_clone::{DynClone, clone_box};
use futures::future::{LocalBoxFuture, ready}; use futures::future::{LocalBoxFuture, ready};
use futures::{AsyncRead, AsyncWrite, FutureExt}; use futures::{AsyncRead, AsyncWrite, FutureExt};
use futures_locks::{RwLock, RwLockReadGuard};
use itertools::Itertools; use itertools::Itertools;
use memo_map::MemoMap; use memo_map::MemoMap;
use never::Never; use never::Never;
@@ -22,28 +23,28 @@ use orchid_base::name::Sym;
use crate::api; use crate::api;
use crate::atom::{ use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet, AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
MethodSetBuilder, TypAtom, err_not_callable, err_not_command, get_info, MethodSetBuilder, TAtom, err_not_callable, err_not_command, get_info,
}; };
use crate::context::{SysCtxEntry, ctx, i};
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::{GExpr, bot}; use crate::gen_expr::{GExpr, bot};
use crate::system::{SysCtx, SysCtxEntry};
use crate::system_ctor::CtedObj; use crate::system_ctor::CtedObj;
pub struct OwnedVariant; pub struct OwnedVariant;
impl AtomicVariant for OwnedVariant {} impl AtomicVariant for OwnedVariant {}
impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A { impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A {
fn _factory(self) -> AtomFactory { fn _factory(self) -> AtomFactory {
AtomFactory::new(async move |ctx| { AtomFactory::new(async move || {
let serial = let serial = ctx()
ctx.get_or_default::<ObjStore>().next_id.fetch_add(1, std::sync::atomic::Ordering::Relaxed); .get_or_default::<ObjStore>()
.next_id
.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
let atom_id = api::AtomId(NonZero::new(serial + 1).unwrap()); let atom_id = api::AtomId(NonZero::new(serial + 1).unwrap());
let (typ_id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card()); let (typ_id, _) = get_info::<A>(ctx().get::<CtedObj>().inst().card());
let mut data = enc_vec(&typ_id).await; let mut data = enc_vec(&typ_id).await;
self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await; self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await;
let g = ctx.get_or_default::<ObjStore>().objects.read().await; ctx().get_or_default::<ObjStore>().objects.read().await.insert(atom_id, Box::new(self));
g.insert(atom_id, Box::new(self)); api::Atom { drop: Some(atom_id), data: api::AtomData(data), owner: ctx().sys_id() }
std::mem::drop(g);
api::Atom { drop: Some(atom_id), data: api::AtomData(data), owner: ctx.sys_id() }
}) })
} }
fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } } fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } }
@@ -53,16 +54,16 @@ impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVari
/// While an atom read guard is held, no atom can be removed. /// While an atom read guard is held, no atom can be removed.
pub(crate) struct AtomReadGuard<'a> { pub(crate) struct AtomReadGuard<'a> {
id: api::AtomId, id: api::AtomId,
guard: RwLockReadGuard<'a, MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>, _lock: PhantomData<&'a ()>,
guard: RwLockReadGuard<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
} }
impl<'a> AtomReadGuard<'a> { impl<'a> AtomReadGuard<'a> {
async fn new(id: api::AtomId, ctx: &'a SysCtx) -> Self { async fn new(id: api::AtomId) -> Self {
let guard = ctx.get_or_default::<ObjStore>().objects.read().await; let guard = ctx().get_or_default::<ObjStore>().objects.read().await;
if guard.get(&id).is_none() { if guard.get(&id).is_none() {
let valid = guard.iter().map(|i| i.0).collect_vec(); panic!("Received invalid atom ID: {id:?}");
panic!("Received invalid atom ID: {id:?} not in {valid:?}");
} }
Self { id, guard } Self { id, guard, _lock: PhantomData }
} }
} }
impl Deref for AtomReadGuard<'_> { impl Deref for AtomReadGuard<'_> {
@@ -71,8 +72,8 @@ impl Deref for AtomReadGuard<'_> {
} }
/// Remove an atom from the store /// Remove an atom from the store
pub(crate) async fn take_atom(id: api::AtomId, ctx: &SysCtx) -> Box<dyn DynOwnedAtom> { pub(crate) async fn take_atom(id: api::AtomId) -> Box<dyn DynOwnedAtom> {
let mut g = ctx.get_or_default::<ObjStore>().objects.write().await; let mut g = ctx().get_or_default::<ObjStore>().objects.write().await;
g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0)) g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0))
} }
@@ -88,64 +89,53 @@ impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
Box::new(<T as AtomCard>::Data::decode(Pin::new(&mut &data[..])).await) as Box<dyn Any> Box::new(<T as AtomCard>::Data::decode(Pin::new(&mut &data[..])).await) as Box<dyn Any>
}) })
} }
fn call(&self, AtomCtx(_, id, ctx): AtomCtx, arg: Expr) -> LocalBoxFuture<'_, GExpr> { fn call(&self, AtomCtx(_, id): AtomCtx, arg: Expr) -> LocalBoxFuture<'_, GExpr> {
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_call(arg).await }) Box::pin(async move { take_atom(id.unwrap()).await.dyn_call(arg).await })
} }
fn call_ref<'a>( fn call_ref<'a>(&'a self, AtomCtx(_, id): AtomCtx<'a>, arg: Expr) -> LocalBoxFuture<'a, GExpr> {
&'a self, Box::pin(async move { AtomReadGuard::new(id.unwrap()).await.dyn_call_ref(arg).await })
AtomCtx(_, id, ctx): AtomCtx<'a>,
arg: Expr,
) -> LocalBoxFuture<'a, GExpr> {
Box::pin(async move { AtomReadGuard::new(id.unwrap(), &ctx).await.dyn_call_ref(arg).await })
} }
fn print(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> LocalBoxFuture<'_, FmtUnit> { fn print(&self, AtomCtx(_, id): AtomCtx<'_>) -> LocalBoxFuture<'_, FmtUnit> {
Box::pin( Box::pin(async move { AtomReadGuard::new(id.unwrap()).await.dyn_print().await })
async move { AtomReadGuard::new(id.unwrap(), &ctx).await.dyn_print(ctx.clone()).await },
)
} }
fn handle_req<'a, 'b: 'a, 'c: 'a>( fn handle_req<'a, 'b: 'a, 'c: 'a>(
&'a self, &'a self,
AtomCtx(_, id, ctx): AtomCtx, AtomCtx(_, id): AtomCtx,
key: Sym, key: Sym,
req: Pin<&'b mut dyn AsyncRead>, req: Pin<&'b mut dyn AsyncRead>,
rep: Pin<&'c mut dyn AsyncWrite>, rep: Pin<&'c mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, bool> { ) -> LocalBoxFuture<'a, bool> {
Box::pin(async move { Box::pin(async move {
let a = AtomReadGuard::new(id.unwrap(), &ctx).await; let a = AtomReadGuard::new(id.unwrap()).await;
let ms = self.ms.get_or_init(self.msbuild.pack(ctx.clone())).await; let ms = self.ms.get_or_init(self.msbuild.pack()).await;
ms.dispatch(a.as_any_ref().downcast_ref().unwrap(), ctx.clone(), key, req, rep).await ms.dispatch(a.as_any_ref().downcast_ref().unwrap(), key, req, rep).await
}) })
} }
fn command<'a>( fn command<'a>(
&'a self, &'a self,
AtomCtx(_, id, ctx): AtomCtx<'a>, AtomCtx(_, id): AtomCtx<'a>,
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> { ) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_command(ctx.clone()).await }) Box::pin(async move { take_atom(id.unwrap()).await.dyn_command().await })
} }
fn drop(&self, AtomCtx(_, id, ctx): AtomCtx) -> LocalBoxFuture<'_, ()> { fn drop(&self, AtomCtx(_, id): AtomCtx) -> LocalBoxFuture<'_, ()> {
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_free(ctx.clone()).await }) Box::pin(async move { take_atom(id.unwrap()).await.dyn_free().await })
} }
fn serialize<'a, 'b: 'a>( fn serialize<'a, 'b: 'a>(
&'a self, &'a self,
AtomCtx(_, id, ctx): AtomCtx<'a>, AtomCtx(_, id): AtomCtx<'a>,
mut write: Pin<&'b mut dyn AsyncWrite>, mut write: Pin<&'b mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> { ) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
Box::pin(async move { Box::pin(async move {
let id = id.unwrap(); let id = id.unwrap();
id.encode(write.as_mut()).await; id.encode(write.as_mut()).await;
AtomReadGuard::new(id, &ctx).await.dyn_serialize(ctx.clone(), write).await AtomReadGuard::new(id).await.dyn_serialize(write).await
}) })
} }
fn deserialize<'a>( fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom> {
&'a self,
ctx: SysCtx,
data: &'a [u8],
refs: &'a [Expr],
) -> LocalBoxFuture<'a, api::Atom> {
Box::pin(async move { Box::pin(async move {
let refs = T::Refs::from_iter(refs.iter().cloned()); let refs = T::Refs::from_iter(refs.iter().cloned());
let obj = T::deserialize(DeserCtxImpl(data, &ctx), refs).await; let obj = T::deserialize(DeserCtxImpl(data), refs).await;
obj._factory().build(ctx).await obj._factory().build().await
}) })
} }
} }
@@ -161,14 +151,12 @@ pub trait DeserializeCtx: Sized {
t t
} }
} }
fn sys(&self) -> SysCtx;
} }
struct DeserCtxImpl<'a>(&'a [u8], &'a SysCtx); struct DeserCtxImpl<'a>(&'a [u8]);
impl DeserializeCtx for DeserCtxImpl<'_> { impl DeserializeCtx for DeserCtxImpl<'_> {
async fn read<T: Decode>(&mut self) -> T { T::decode(Pin::new(&mut self.0)).await } async fn read<T: Decode>(&mut self) -> T { T::decode(Pin::new(&mut self.0)).await }
fn is_empty(&self) -> bool { self.0.is_empty() } fn is_empty(&self) -> bool { self.0.is_empty() }
fn sys(&self) -> SysCtx { self.1.clone() }
} }
pub trait RefSet { pub trait RefSet {
@@ -219,22 +207,21 @@ pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Any + Clone + 'static {
fn val(&self) -> impl Future<Output = Cow<'_, Self::Data>>; fn val(&self) -> impl Future<Output = Cow<'_, Self::Data>>;
#[allow(unused_variables)] #[allow(unused_variables)]
fn call_ref(&self, arg: Expr) -> impl Future<Output = GExpr> { fn call_ref(&self, arg: Expr) -> impl Future<Output = GExpr> {
async move { bot(err_not_callable(arg.ctx().i()).await) } async move { bot(err_not_callable().await) }
} }
fn call(self, arg: Expr) -> impl Future<Output = GExpr> { fn call(self, arg: Expr) -> impl Future<Output = GExpr> {
async { async {
let ctx = arg.ctx();
let gcl = self.call_ref(arg).await; let gcl = self.call_ref(arg).await;
self.free(ctx).await; self.free().await;
gcl gcl
} }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn command(self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> { fn command(self) -> impl Future<Output = OrcRes<Option<GExpr>>> {
async move { Err(err_not_command(ctx.i()).await) } async move { Err(err_not_command().await) }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn free(self, ctx: SysCtx) -> impl Future<Output = ()> { async {} } fn free(self) -> impl Future<Output = ()> { async {} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> { fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> {
async { format!("OwnedAtom({})", type_name::<Self>()).into() } async { format!("OwnedAtom({})", type_name::<Self>()).into() }
@@ -242,14 +229,13 @@ pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Any + Clone + 'static {
#[allow(unused_variables)] #[allow(unused_variables)]
fn serialize( fn serialize(
&self, &self,
ctx: SysCtx,
write: Pin<&mut (impl AsyncWrite + ?Sized)>, write: Pin<&mut (impl AsyncWrite + ?Sized)>,
) -> impl Future<Output = Self::Refs> { ) -> impl Future<Output = Self::Refs> {
assert_serializable::<Self>(); assert_serializable::<Self>();
async { panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>()) } async { panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>()) }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn deserialize(ctx: impl DeserializeCtx, refs: Self::Refs) -> impl Future<Output = Self> { fn deserialize(dctx: impl DeserializeCtx, refs: Self::Refs) -> impl Future<Output = Self> {
assert_serializable::<Self>(); assert_serializable::<Self>();
async { async {
panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>()) panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>())
@@ -268,12 +254,11 @@ pub trait DynOwnedAtom: DynClone + 'static {
fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()>; fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()>;
fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr>; fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr>;
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr>; fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr>;
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>>; fn dyn_command(self: Box<Self>) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>>;
fn dyn_free(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, ()>; fn dyn_free(self: Box<Self>) -> LocalBoxFuture<'static, ()>;
fn dyn_print(&self, ctx: SysCtx) -> LocalBoxFuture<'_, FmtUnit>; fn dyn_print(&self) -> LocalBoxFuture<'_, FmtUnit>;
fn dyn_serialize<'a>( fn dyn_serialize<'a>(
&'a self, &'a self,
ctx: SysCtx,
sink: Pin<&'a mut dyn AsyncWrite>, sink: Pin<&'a mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>; ) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
} }
@@ -289,23 +274,20 @@ impl<T: OwnedAtom> DynOwnedAtom for T {
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr> { fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr> {
self.call(arg).boxed_local() self.call(arg).boxed_local()
} }
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>> { fn dyn_command(self: Box<Self>) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>> {
self.command(ctx).boxed_local() self.command().boxed_local()
} }
fn dyn_free(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, ()> { fn dyn_free(self: Box<Self>) -> LocalBoxFuture<'static, ()> { self.free().boxed_local() }
self.free(ctx).boxed_local() fn dyn_print(&self) -> LocalBoxFuture<'_, FmtUnit> {
} async move { self.print_atom(&FmtCtxImpl { i: &i() }).await }.boxed_local()
fn dyn_print(&self, ctx: SysCtx) -> LocalBoxFuture<'_, FmtUnit> {
async move { self.print_atom(&FmtCtxImpl { i: ctx.i() }).await }.boxed_local()
} }
fn dyn_serialize<'a>( fn dyn_serialize<'a>(
&'a self, &'a self,
ctx: SysCtx,
sink: Pin<&'a mut dyn AsyncWrite>, sink: Pin<&'a mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> { ) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
match TypeId::of::<Never>() == TypeId::of::<<Self as OwnedAtom>::Refs>() { match TypeId::of::<Never>() == TypeId::of::<<Self as OwnedAtom>::Refs>() {
true => ready(None).boxed_local(), true => ready(None).boxed_local(),
false => async { Some(self.serialize(ctx, sink).await.to_vec()) }.boxed_local(), false => async { Some(self.serialize(sink).await.to_vec()) }.boxed_local(),
} }
} }
} }
@@ -317,16 +299,16 @@ pub(crate) struct ObjStore {
} }
impl SysCtxEntry for ObjStore {} impl SysCtxEntry for ObjStore {}
pub async fn own<A: OwnedAtom>(typ: TypAtom<A>) -> A { pub async fn own<A: OwnedAtom>(typ: &TAtom<A>) -> A {
let ctx = typ.untyped.ctx(); let g = ctx().get_or_default::<ObjStore>().objects.read().await;
let g = ctx.get_or_default::<ObjStore>().objects.read().await;
let atom_id = typ.untyped.atom.drop.expect("Owned atoms always have a drop ID"); let atom_id = typ.untyped.atom.drop.expect("Owned atoms always have a drop ID");
let dyn_atom = let dyn_atom =
g.get(&atom_id).expect("Atom ID invalid; atom type probably not owned by this crate"); g.get(&atom_id).expect("Atom ID invalid; atom type probably not owned by this crate");
dyn_atom.as_any_ref().downcast_ref().cloned().expect("The ID should imply a type as well") dyn_atom.as_any_ref().downcast_ref().cloned().expect("The ID should imply a type as well")
} }
pub async fn debug_print_obj_store(ctx: &SysCtx, show_atoms: bool) { pub async fn debug_print_obj_store(show_atoms: bool) {
let ctx = ctx();
let store = ctx.get_or_default::<ObjStore>(); let store = ctx.get_or_default::<ObjStore>();
let keys = store.objects.read().await.keys().cloned().collect_vec(); let keys = store.objects.read().await.keys().cloned().collect_vec();
let mut message = "Atoms in store:".to_string(); let mut message = "Atoms in store:".to_string();
@@ -341,7 +323,7 @@ pub async fn debug_print_obj_store(ctx: &SysCtx, show_atoms: bool) {
}; };
let atom = clone_box(&**atom); let atom = clone_box(&**atom);
std::mem::drop(g); std::mem::drop(g);
message += &format!("\n{k:?} -> {}", take_first(&atom.dyn_print(ctx.clone()).await, true)); message += &format!("\n{k:?} -> {}", take_first(&atom.dyn_print().await, true));
} }
} }
eprintln!("{message}") eprintln!("{message}")

View File

@@ -15,20 +15,20 @@ use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet, AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
MethodSetBuilder, err_not_callable, err_not_command, get_info, MethodSetBuilder, err_not_callable, err_not_command, get_info,
}; };
use crate::context::ctx;
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::{GExpr, bot}; use crate::gen_expr::{GExpr, bot};
use crate::system::SysCtx;
use crate::system_ctor::CtedObj; use crate::system_ctor::CtedObj;
pub struct ThinVariant; pub struct ThinVariant;
impl AtomicVariant for ThinVariant {} impl AtomicVariant for ThinVariant {}
impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A { impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A {
fn _factory(self) -> AtomFactory { fn _factory(self) -> AtomFactory {
AtomFactory::new(async move |ctx| { AtomFactory::new(async move || {
let (id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card()); let (id, _) = get_info::<A>(ctx().get::<CtedObj>().inst().card());
let mut buf = enc_vec(&id).await; let mut buf = enc_vec(&id).await;
self.encode(Pin::new(&mut buf)).await; self.encode(Pin::new(&mut buf)).await;
api::Atom { drop: None, data: api::AtomData(buf), owner: ctx.sys_id() } api::Atom { drop: None, data: api::AtomData(buf), owner: ctx().sys_id() }
}) })
} }
fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } } fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } }
@@ -40,8 +40,8 @@ pub struct ThinAtomDynfo<T: ThinAtom> {
ms: OnceCell<MethodSet<T>>, ms: OnceCell<MethodSet<T>>,
} }
impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> { impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
fn print<'a>(&self, AtomCtx(buf, _, ctx): AtomCtx<'a>) -> LocalBoxFuture<'a, FmtUnit> { fn print<'a>(&self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, FmtUnit> {
Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.print(ctx).await }) Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.print().await })
} }
fn tid(&self) -> TypeId { TypeId::of::<T>() } fn tid(&self) -> TypeId { TypeId::of::<T>() }
fn name(&self) -> &'static str { type_name::<T>() } fn name(&self) -> &'static str { type_name::<T>() }
@@ -56,21 +56,21 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
} }
fn handle_req<'a, 'm1: 'a, 'm2: 'a>( fn handle_req<'a, 'm1: 'a, 'm2: 'a>(
&'a self, &'a self,
AtomCtx(buf, _, sys): AtomCtx<'a>, AtomCtx(buf, _): AtomCtx<'a>,
key: Sym, key: Sym,
req: Pin<&'m1 mut dyn AsyncRead>, req: Pin<&'m1 mut dyn AsyncRead>,
rep: Pin<&'m2 mut dyn AsyncWrite>, rep: Pin<&'m2 mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, bool> { ) -> LocalBoxFuture<'a, bool> {
Box::pin(async move { Box::pin(async move {
let ms = self.ms.get_or_init(self.msbuild.pack(sys.clone())).await; let ms = self.ms.get_or_init(self.msbuild.pack()).await;
ms.dispatch(&T::decode(Pin::new(&mut &buf[..])).await, sys, key, req, rep).await ms.dispatch(&T::decode(Pin::new(&mut &buf[..])).await, key, req, rep).await
}) })
} }
fn command<'a>( fn command<'a>(
&'a self, &'a self,
AtomCtx(buf, _, ctx): AtomCtx<'a>, AtomCtx(buf, _): AtomCtx<'a>,
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> { ) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
async move { T::decode(Pin::new(&mut &buf[..])).await.command(ctx).await }.boxed_local() async move { T::decode(Pin::new(&mut &buf[..])).await.command().await }.boxed_local()
} }
fn serialize<'a, 'b: 'a>( fn serialize<'a, 'b: 'a>(
&'a self, &'a self,
@@ -82,19 +82,14 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
Some(Vec::new()) Some(Vec::new())
}) })
} }
fn deserialize<'a>( fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom> {
&'a self,
ctx: SysCtx,
data: &'a [u8],
refs: &'a [Expr],
) -> LocalBoxFuture<'a, api::Atom> {
assert!(refs.is_empty(), "Refs found when deserializing thin atom"); assert!(refs.is_empty(), "Refs found when deserializing thin atom");
Box::pin(async { T::decode(Pin::new(&mut &data[..])).await._factory().build(ctx).await }) Box::pin(async { T::decode(Pin::new(&mut &data[..])).await._factory().build().await })
} }
fn drop<'a>(&'a self, AtomCtx(buf, _, ctx): AtomCtx<'a>) -> LocalBoxFuture<'a, ()> { fn drop<'a>(&'a self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, ()> {
Box::pin(async move { Box::pin(async move {
let string_self = T::decode(Pin::new(&mut &buf[..])).await.print(ctx.clone()).await; let string_self = T::decode(Pin::new(&mut &buf[..])).await.print().await;
writeln!(ctx.logger(), "Received drop signal for non-drop atom {string_self:?}"); writeln!(ctx().logger(), "Received drop signal for non-drop atom {string_self:?}");
}) })
} }
} }
@@ -104,14 +99,14 @@ pub trait ThinAtom:
{ {
#[allow(unused_variables)] #[allow(unused_variables)]
fn call(&self, arg: Expr) -> impl Future<Output = GExpr> { fn call(&self, arg: Expr) -> impl Future<Output = GExpr> {
async move { bot(err_not_callable(arg.ctx().i()).await) } async move { bot(err_not_callable().await) }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn command(&self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> { fn command(&self) -> impl Future<Output = OrcRes<Option<GExpr>>> {
async move { Err(err_not_command(ctx.i()).await) } async move { Err(err_not_command().await) }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn print(&self, ctx: SysCtx) -> impl Future<Output = FmtUnit> { fn print(&self) -> impl Future<Output = FmtUnit> {
async { format!("ThinAtom({})", type_name::<Self>()).into() } async { format!("ThinAtom({})", type_name::<Self>()).into() }
} }
} }

View File

@@ -0,0 +1,90 @@
use std::any::{Any, TypeId, type_name};
use std::fmt;
use std::num::NonZero;
use std::rc::Rc;
use memo_map::MemoMap;
use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner;
use orchid_base::logging::Logger;
use orchid_base::reqnot::ReqNot;
use task_local::task_local;
use crate::api;
use crate::system_ctor::CtedObj;
#[derive(Clone)]
pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
impl SysCtx {
pub fn new(
id: api::SysId,
i: Interner,
reqnot: ReqNot<api::ExtMsgSet>,
spawner: Spawner,
logger: Logger,
cted: CtedObj,
) -> Self {
let this = Self(Rc::new(MemoMap::new()));
this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted);
this
}
pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self {
assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists");
self
}
pub fn get_or_insert<T: SysCtxEntry>(&self, f: impl FnOnce() -> T) -> &T {
(self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref())
.expect("Keyed by TypeId")
}
pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { self.get_or_insert(T::default) }
pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> {
Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId"))
}
pub fn get<T: SysCtxEntry>(&self) -> &T {
self.try_get().unwrap_or_else(|| panic!("Context {} missing", type_name::<T>()))
}
/// Shorthand to get the messaging link
pub fn reqnot(&self) -> &ReqNot<api::ExtMsgSet> { self.get::<ReqNot<api::ExtMsgSet>>() }
/// Shorthand to get the system ID
pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() }
/// Spawn a task that will eventually be executed asynchronously
pub fn spawn(&self, f: impl Future<Output = ()> + 'static) {
(self.get::<Spawner>())(Box::pin(CTX.scope(self.clone(), f)))
}
/// Shorthand to get the logger
pub fn logger(&self) -> &Logger { self.get::<Logger>() }
/// Shorthand to get the constructed system object
pub fn cted(&self) -> &CtedObj { self.get::<CtedObj>() }
}
impl fmt::Debug for SysCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "SysCtx({:?})", self.sys_id())
}
}
pub trait SysCtxEntry: 'static + Sized {}
impl SysCtxEntry for api::SysId {}
impl SysCtxEntry for ReqNot<api::ExtMsgSet> {}
impl SysCtxEntry for Spawner {}
impl SysCtxEntry for CtedObj {}
impl SysCtxEntry for Logger {}
impl SysCtxEntry for Interner {}
task_local! {
static CTX: SysCtx;
}
pub async fn with_ctx<F: Future>(ctx: SysCtx, f: F) -> F::Output { CTX.scope(ctx, f).await }
pub fn ctx() -> SysCtx { CTX.get() }
/// Shorthand to get the [Interner] instance
pub fn i() -> Interner { ctx().get::<Interner>().clone() }
pub fn mock_ctx() -> SysCtx {
let ctx = SysCtx(Rc::default());
ctx
.add(Logger::new(api::LogStrategy::StdErr))
.add(Interner::new_master())
.add::<Spawner>(Rc::new(|_| panic!("Cannot fork in test environment")))
.add(api::SysId(NonZero::<u16>::MIN));
ctx
}
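The new `context` module replaces explicit `SysCtx` threading with a task-local scope. A minimal usage sketch built only from the functions defined above; the `orchid_extension::context` export path is an assumption:

```rust
// Sketch only: everything inside the with_ctx scope can reach the system
// context through free functions instead of a SysCtx parameter.
use orchid_extension::context::{ctx, i, mock_ctx, with_ctx}; // path assumed

async fn demo() {
  with_ctx(mock_ctx(), async {
    let sys_id = ctx().sys_id(); // shorthand getter defined on SysCtx
    let greeting = i().i("hello").await; // interner shorthand
    let _ = (sys_id, greeting);
  })
  .await;
}
```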

View File

@@ -1,14 +1,16 @@
use std::future::Future; use std::future::Future;
use std::pin::Pin;
use dyn_clone::DynClone;
use never::Never; use never::Never;
use orchid_base::error::{OrcErrv, OrcRes, mk_errv}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::Interner;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use trait_set::trait_set;
use crate::atom::{AtomicFeatures, ForeignAtom, ToAtom, TypAtom}; use crate::atom::{AtomicFeatures, ForeignAtom, TAtom, ToAtom};
use crate::context::i;
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::{GExpr, atom, bot}; use crate::gen_expr::{GExpr, atom, bot};
use crate::system::{SysCtx, downcast_atom};
pub trait TryFromExpr: Sized { pub trait TryFromExpr: Sized {
fn try_from_expr(expr: Expr) -> impl Future<Output = OrcRes<Self>>; fn try_from_expr(expr: Expr) -> impl Future<Output = OrcRes<Self>>;
@@ -24,61 +26,91 @@ impl<T: TryFromExpr, U: TryFromExpr> TryFromExpr for (T, U) {
} }
} }
async fn err_not_atom(pos: Pos, i: &Interner) -> OrcErrv { async fn err_not_atom(pos: Pos) -> OrcErrv {
mk_errv(i.i("Expected an atom").await, "This expression is not an atom", [pos]) mk_errv(i().i("Expected an atom").await, "This expression is not an atom", [pos])
}
async fn err_type(pos: Pos, i: &Interner) -> OrcErrv {
mk_errv(i.i("Type error").await, "The atom is a different type than expected", [pos])
} }
impl TryFromExpr for ForeignAtom { impl TryFromExpr for ForeignAtom {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
match expr.atom().await { match expr.atom().await {
Err(ex) => Err(err_not_atom(ex.data().await.pos.clone(), ex.ctx().i()).await), Err(ex) => Err(err_not_atom(ex.data().await.pos.clone()).await),
Ok(f) => Ok(f), Ok(f) => Ok(f),
} }
} }
} }
impl<A: AtomicFeatures> TryFromExpr for TypAtom<A> { impl<A: AtomicFeatures> TryFromExpr for TAtom<A> {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
let f = ForeignAtom::try_from_expr(expr).await?; let f = ForeignAtom::try_from_expr(expr).await?;
match downcast_atom::<A>(f).await { match f.clone().downcast::<A>().await {
Ok(a) => Ok(a), Ok(a) => Ok(a),
Err(f) => Err(err_type(f.pos(), f.ctx().i()).await), Err(e) => Err(e.mk_err().await),
} }
} }
} }
impl TryFromExpr for SysCtx {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr.ctx()) }
}
pub trait ToExpr { pub trait ToExpr {
fn to_expr(self) -> impl Future<Output = GExpr>; fn to_gen(self) -> impl Future<Output = GExpr>;
fn to_expr(self) -> impl Future<Output = Expr>
where Self: Sized {
async { self.to_gen().await.create().await }
}
}
pub trait ToExprDyn {
fn to_gen_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = GExpr> + 'a>>
where Self: 'a;
fn to_expr_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = Expr> + 'a>>
where Self: 'a;
}
impl<T: ToExpr> ToExprDyn for T {
fn to_gen_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = GExpr> + 'a>>
where Self: 'a {
Box::pin(self.to_gen())
}
fn to_expr_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = Expr> + 'a>>
where Self: 'a {
Box::pin(self.to_expr())
}
}
trait_set! {
pub trait ClonableToExprDyn = ToExprDyn + DynClone;
}
impl ToExpr for Box<dyn ToExprDyn> {
async fn to_gen(self) -> GExpr { self.to_gen_dyn().await }
async fn to_expr(self) -> Expr { self.to_expr_dyn().await }
}
impl ToExpr for Box<dyn ClonableToExprDyn> {
async fn to_gen(self) -> GExpr { self.to_gen_dyn().await }
async fn to_expr(self) -> Expr { self.to_expr_dyn().await }
}
impl Clone for Box<dyn ClonableToExprDyn> {
fn clone(&self) -> Self { dyn_clone::clone_box(&**self) }
} }
impl ToExpr for GExpr { impl ToExpr for GExpr {
async fn to_expr(self) -> GExpr { self } async fn to_gen(self) -> GExpr { self }
async fn to_expr(self) -> Expr { self.create().await }
} }
impl ToExpr for Expr { impl ToExpr for Expr {
async fn to_expr(self) -> GExpr { self.slot() } async fn to_gen(self) -> GExpr { self.slot() }
async fn to_expr(self) -> Expr { self }
} }
impl<T: ToExpr> ToExpr for OrcRes<T> { impl<T: ToExpr> ToExpr for OrcRes<T> {
async fn to_expr(self) -> GExpr { async fn to_gen(self) -> GExpr {
match self { match self {
Err(e) => bot(e), Err(e) => bot(e),
Ok(t) => t.to_expr().await, Ok(t) => t.to_gen().await,
} }
} }
} }
impl<A: ToAtom> ToExpr for A { impl<A: ToAtom> ToExpr for A {
async fn to_expr(self) -> GExpr { atom(self) } async fn to_gen(self) -> GExpr { atom(self) }
} }
impl ToExpr for Never { impl ToExpr for Never {
async fn to_expr(self) -> GExpr { match self {} } async fn to_gen(self) -> GExpr { match self {} }
} }

View File

@@ -8,7 +8,6 @@ use futures::stream::{self, LocalBoxStream};
use futures::{FutureExt, SinkExt, StreamExt}; use futures::{FutureExt, SinkExt, StreamExt};
use never::Never; use never::Never;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::format::{FmtCtx, FmtUnit};
use crate::atom::Atomic; use crate::atom::Atomic;
use crate::atom_owned::{OwnedAtom, OwnedVariant}; use crate::atom_owned::{OwnedAtom, OwnedVariant};
@@ -23,7 +22,6 @@ enum Command {
} }
struct BuilderCoroutineData { struct BuilderCoroutineData {
name: Option<String>,
receiver: Mutex<LocalBoxStream<'static, Command>>, receiver: Mutex<LocalBoxStream<'static, Command>>,
} }
@@ -35,15 +33,15 @@ impl BuilderCoroutine {
match cmd { match cmd {
None => panic!("Before the stream ends, we should have gotten a Halt"), None => panic!("Before the stream ends, we should have gotten a Halt"),
Some(Command::Halt(expr)) => expr, Some(Command::Halt(expr)) => expr,
Some(Command::Execute(expr, reply)) => call([ Some(Command::Execute(expr, reply)) => call(
lambda(0, [seq([ lambda(0, [seq(
arg(0), [arg(0)],
call([Replier { reply, builder: self }.to_expr().await, arg(0)]), call(Replier { reply, builder: self }.to_gen().await, [arg(0)]),
])]), )]),
expr, [expr],
]), ),
Some(Command::Register(expr, reply)) => Some(Command::Register(expr, reply)) =>
call([Replier { reply, builder: self }.to_expr().await, expr]), call(Replier { reply, builder: self }.to_gen().await, [expr]),
} }
} }
} }
@@ -65,23 +63,13 @@ impl OwnedAtom for Replier {
std::mem::drop(self.reply); std::mem::drop(self.reply);
self.builder.run().await self.builder.run().await
} }
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
match &self.builder.0.name {
None => "BuilderCoroutine".into(),
Some(name) => format!("BuilderCoroutine({name})").into(),
}
}
} }
pub async fn exec<R: ToExpr>( pub async fn exec<R: ToExpr>(f: impl for<'a> AsyncFnOnce(ExecHandle<'a>) -> R + 'static) -> GExpr {
debug: impl AsRef<str>,
f: impl for<'a> AsyncFnOnce(ExecHandle<'a>) -> R + 'static,
) -> GExpr {
let (cmd_snd, cmd_recv) = channel(0); let (cmd_snd, cmd_recv) = channel(0);
let halt = async { Command::Halt(f(ExecHandle(cmd_snd, PhantomData)).await.to_expr().await) } let halt = async { Command::Halt(f(ExecHandle(cmd_snd, PhantomData)).await.to_gen().await) }
.into_stream(); .into_stream();
let coro = BuilderCoroutine(Rc::new(BuilderCoroutineData { let coro = BuilderCoroutine(Rc::new(BuilderCoroutineData {
name: Some(debug.as_ref().to_string()),
receiver: Mutex::new(stream::select(halt, cmd_recv).boxed_local()), receiver: Mutex::new(stream::select(halt, cmd_recv).boxed_local()),
})); }));
coro.run().await coro.run().await
@@ -93,12 +81,12 @@ pub struct ExecHandle<'a>(Sender<Command>, PhantomData<&'a ()>);
impl ExecHandle<'_> { impl ExecHandle<'_> {
pub async fn exec<T: TryFromExpr>(&mut self, val: impl ToExpr) -> OrcRes<T> { pub async fn exec<T: TryFromExpr>(&mut self, val: impl ToExpr) -> OrcRes<T> {
let (reply_snd, mut reply_recv) = channel(1); let (reply_snd, mut reply_recv) = channel(1);
self.0.send(Command::Execute(val.to_expr().await, reply_snd)).await.expect(WEIRD_DROP_ERR); self.0.send(Command::Execute(val.to_gen().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
T::try_from_expr(reply_recv.next().await.expect(WEIRD_DROP_ERR)).await T::try_from_expr(reply_recv.next().await.expect(WEIRD_DROP_ERR)).await
} }
pub async fn register(&mut self, val: impl ToExpr) -> Expr { pub async fn register(&mut self, val: impl ToExpr) -> Expr {
let (reply_snd, mut reply_recv) = channel(1); let (reply_snd, mut reply_recv) = channel(1);
self.0.send(Command::Register(val.to_expr().await, reply_snd)).await.expect(WEIRD_DROP_ERR); self.0.send(Command::Register(val.to_gen().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
reply_recv.next().await.expect(WEIRD_DROP_ERR) reply_recv.next().await.expect(WEIRD_DROP_ERR)
} }
} }
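The slimmed-down `exec` above (the debug label is gone) drives a coroutine that can ask the host to evaluate intermediate expressions. A usage sketch under the signatures in this hunk; `Num` stands in for some atom type with `TryFromExpr`/`ToExpr` impls and is not defined here:

async fn force_then_return(x: Expr) -> GExpr {
  exec(async move |mut hand| {
    // Ask the interpreter to reduce `x` and hand it back as a `Num`.
    let n: Num = hand.exec(x).await?;
    // Whatever is returned goes through `ToExpr::to_gen`; an `Err` becomes
    // a bottom expression via the `OrcRes` impl.
    OrcRes::Ok(n)
  })
  .await
}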

View File

@@ -5,11 +5,11 @@ use std::num::NonZero;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use async_lock::RwLock;
use futures::channel::mpsc::{Receiver, Sender, channel}; use futures::channel::mpsc::{Receiver, Sender, channel};
use futures::future::{LocalBoxFuture, join_all}; use futures::future::{LocalBoxFuture, join_all};
use futures::lock::Mutex; use futures::lock::Mutex;
use futures::{FutureExt, SinkExt, StreamExt, stream, stream_select}; use futures::{FutureExt, SinkExt, StreamExt, stream, stream_select};
use futures_locks::RwLock;
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use orchid_api_traits::{Decode, UnderRoot, enc_vec}; use orchid_api_traits::{Decode, UnderRoot, enc_vec};
@@ -29,10 +29,11 @@ use trait_set::trait_set;
use crate::api; use crate::api;
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId}; use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId};
use crate::atom_owned::take_atom; use crate::atom_owned::take_atom;
use crate::context::{SysCtx, ctx, i, with_ctx};
use crate::expr::{BorrowedExprStore, Expr, ExprHandle}; use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
use crate::lexer::{LexContext, ekey_cascade, ekey_not_applicable}; use crate::lexer::{LexContext, ekey_cascade, ekey_not_applicable};
use crate::parser::{PTokTree, ParsCtx, get_const, linev_into_api}; use crate::parser::{PTokTree, ParsCtx, get_const, linev_into_api};
use crate::system::{SysCtx, atom_by_idx}; use crate::system::atom_by_idx;
use crate::system_ctor::{CtedObj, DynSystemCtor}; use crate::system_ctor::{CtedObj, DynSystemCtor};
use crate::tree::{LazyMemberFactory, TreeIntoApiCtxImpl}; use crate::tree::{LazyMemberFactory, TreeIntoApiCtxImpl};
@@ -62,7 +63,6 @@ pub struct SystemRecord {
trait_set! { trait_set! {
pub trait WithAtomRecordCallback<'a, T> = AsyncFnOnce( pub trait WithAtomRecordCallback<'a, T> = AsyncFnOnce(
Box<dyn AtomDynfo>, Box<dyn AtomDynfo>,
SysCtx,
AtomTypeId, AtomTypeId,
&'a [u8] &'a [u8]
) -> T ) -> T
@@ -78,7 +78,7 @@ pub async fn with_atom_record<'a, F: Future<Output = SysCtx>, T>(
let inst = ctx.get::<CtedObj>().inst(); let inst = ctx.get::<CtedObj>().inst();
let id = AtomTypeId::decode(Pin::new(&mut data)).await; let id = AtomTypeId::decode(Pin::new(&mut data)).await;
let atom_record = atom_by_idx(inst.card(), id.clone()).expect("Atom ID reserved"); let atom_record = atom_by_idx(inst.card(), id.clone()).expect("Atom ID reserved");
cb(atom_record, ctx, id, data).await with_ctx(ctx, async move { cb(atom_record, id, data).await }).await
} }
pub struct ExtensionOwner { pub struct ExtensionOwner {
@@ -145,10 +145,7 @@ pub fn extension_init(
clone!(exit_send mut); clone!(exit_send mut);
async move { async move {
match n { match n {
api::HostExtNotif::Exit => { api::HostExtNotif::Exit => exit_send.send(()).await.unwrap(),
eprintln!("Exit received");
exit_send.send(()).await.unwrap()
},
} }
} }
.boxed_local() .boxed_local()
@@ -160,7 +157,8 @@ pub fn extension_init(
clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger); clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger);
async move { async move {
let interner_cell = interner_weak.upgrade().expect("Interner dropped before request"); let interner_cell = interner_weak.upgrade().expect("Interner dropped before request");
let i = interner_cell.borrow().clone().expect("Request arrived before interner set"); let interner =
interner_cell.borrow().clone().expect("Request arrived before interner set");
if !matches!(req, api::HostExtReq::AtomReq(api::AtomReq::AtomPrint(_))) { if !matches!(req, api::HostExtReq::AtomReq(api::AtomReq::AtomPrint(_))) {
writeln!(msg_logger, "{} extension received request {req:?}", data.name); writeln!(msg_logger, "{} extension received request {req:?}", data.name);
} }
@@ -172,54 +170,58 @@ pub fn extension_init(
} }
hand.handle(&sys_drop, &()).await hand.handle(&sys_drop, &()).await
}, },
api::HostExtReq::AtomDrop(atom_drop @ api::AtomDrop(sys_id, atom)) => { api::HostExtReq::AtomDrop(atom_drop @ api::AtomDrop(sys_id, atom)) =>
let ctx = get_ctx(sys_id).await; with_ctx(get_ctx(sys_id).await, async move {
take_atom(atom, &ctx).await.dyn_free(ctx.clone()).await; take_atom(atom).await.dyn_free().await;
hand.handle(&atom_drop, &()).await hand.handle(&atom_drop, &()).await
}, })
.await,
api::HostExtReq::Ping(ping @ api::Ping) => hand.handle(&ping, &()).await, api::HostExtReq::Ping(ping @ api::Ping) => hand.handle(&ping, &()).await,
api::HostExtReq::Sweep(sweep @ api::Sweep) => api::HostExtReq::Sweep(sweep @ api::Sweep) =>
hand.handle(&sweep, &i.sweep_replica().await).await, hand.handle(&sweep, &interner.sweep_replica().await).await,
api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => { api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => {
let (sys_id, _) = (decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system)) let (sys_id, _) = (decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system))
.expect("NewSystem call received for invalid system"); .expect("NewSystem call received for invalid system");
let cted = data.systems[sys_id].new_system(&new_sys); let cted = data.systems[sys_id].new_system(&new_sys);
with_ctx(init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await, async move {
let lex_filter = let lex_filter =
cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| { cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned())) char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned()))
}); });
let lazy_members = Mutex::new(HashMap::new()); let lazy_members = Mutex::new(HashMap::new());
let ctx = init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await; let const_root = stream::iter(cted.inst().dyn_env().await)
let const_root = stream::iter(cted.inst().dyn_env())
.then(|mem| { .then(|mem| {
let lazy_mems = &lazy_members; let lazy_mems = &lazy_members;
clone!(i, ctx; async move { async move {
let name = i().i(&mem.name).await;
let mut tia_ctx = TreeIntoApiCtxImpl { let mut tia_ctx = TreeIntoApiCtxImpl {
lazy_members: &mut *lazy_mems.lock().await, lazy_members: &mut *lazy_mems.lock().await,
sys: ctx,
basepath: &[], basepath: &[],
path: Substack::Bottom, path: Substack::Bottom.push(name.clone()),
}; };
(i.i(&mem.name).await.to_api(), mem.kind.into_api(&mut tia_ctx).await) (name.to_api(), mem.kind.into_api(&mut tia_ctx).await)
}) }
}) })
.collect() .collect()
.await; .await;
let prelude = let prelude =
cted.inst().dyn_prelude(&i).await.iter().map(|sym| sym.to_api()).collect(); cted.inst().dyn_prelude().await.iter().map(|sym| sym.to_api()).collect();
let record = SystemRecord { ctx, lazy_members }; let record = SystemRecord { ctx: ctx(), lazy_members };
let systems = systems_weak.upgrade().expect("System constructed during shutdown"); let systems = systems_weak.upgrade().expect("System constructed during shutdown");
systems.write().await.insert(new_sys.id, record); systems.write().await.insert(new_sys.id, record);
let line_types = join_all( let line_types = join_all(
(cted.inst().dyn_parsers().iter()) (cted.inst().dyn_parsers().iter())
.map(|p| async { i.i(p.line_head()).await.to_api() }), .map(|p| async { interner.i(p.line_head()).await.to_api() }),
) )
.await; .await;
let response = api::NewSystemResponse { lex_filter, const_root, line_types, prelude }; let response =
api::NewSystemResponse { lex_filter, const_root, line_types, prelude };
hand.handle(&new_sys, &response).await hand.handle(&new_sys, &response).await
})
.await
}, },
api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => { api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) =>
let sys_ctx = get_ctx(sys_id).await; with_ctx(get_ctx(sys_id).await, async move {
let systems = systems_weak.upgrade().expect("Member queried during shutdown"); let systems = systems_weak.upgrade().expect("Member queried during shutdown");
let systems_g = systems.read().await; let systems_g = systems.read().await;
let mut lazy_members = let mut lazy_members =
@@ -229,41 +231,37 @@ pub fn extension_init(
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"), Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
Some(MemberRecord::Gen(path, cb)) => (path, cb), Some(MemberRecord::Gen(path, cb)) => (path, cb),
}; };
let tree = cb.build(Sym::new(path.clone(), &i).await.unwrap(), sys_ctx.clone()).await; let tree = cb.build(Sym::new(path.clone(), &interner).await.unwrap()).await;
let mut tia_ctx = TreeIntoApiCtxImpl { let mut tia_ctx = TreeIntoApiCtxImpl {
sys: sys_ctx,
path: Substack::Bottom, path: Substack::Bottom,
basepath: &path, basepath: &path,
lazy_members: &mut lazy_members, lazy_members: &mut lazy_members,
}; };
hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
}, })
.await,
api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => { api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
let api::SysFwded(sys_id, payload) = fwd; let api::SysFwded(sys_id, payload) = fwd;
let ctx = get_ctx(sys_id).await; let ctx = get_ctx(sys_id).await;
with_ctx(ctx.clone(), async move {
let sys = ctx.cted().inst(); let sys = ctx.cted().inst();
sys.dyn_request(hand, payload).await sys.dyn_request(hand, payload).await
})
.await
}, },
api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) => { api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) =>
let mut sys_ctx = get_ctx(sys).await; with_ctx(get_ctx(sys).await, async move {
let text = Tok::from_api(text, &i).await; let text = Tok::from_api(text, &i()).await;
let src = Sym::from_api(src, sys_ctx.i()).await; let src = Sym::from_api(src, &i()).await;
let rep = Reporter::new(); let rep = Reporter::new();
let expr_store = BorrowedExprStore::new(); let expr_store = BorrowedExprStore::new();
let trigger_char = text.chars().nth(pos as usize).unwrap(); let trigger_char = text.chars().nth(pos as usize).unwrap();
let ekey_na = ekey_not_applicable(&i).await; let ekey_na = ekey_not_applicable().await;
let ekey_cascade = ekey_cascade(&i).await; let ekey_cascade = ekey_cascade().await;
let lexers = sys_ctx.cted().inst().dyn_lexers(); let lexers = ctx().cted().inst().dyn_lexers();
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) { for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char))
let ctx = LexContext { {
id, let ctx = LexContext::new(&expr_store, &text, id, pos, src.clone(), &rep);
pos,
text: &text,
src: src.clone(),
ctx: sys_ctx.clone(),
rep: &rep,
exprs: &expr_store,
};
match lx.lex(&text[pos as usize..], &ctx).await { match lx.lex(&text[pos as usize..], &ctx).await {
Err(e) if e.any(|e| *e == ekey_na) => continue, Err(e) if e.any(|e| *e == ekey_na) => continue,
Err(e) => { Err(e) => {
@@ -272,7 +270,7 @@ pub fn extension_init(
return hand.handle(&lex, &eopt).await; return hand.handle(&lex, &eopt).await;
}, },
Ok((s, expr)) => { Ok((s, expr)) => {
let expr = expr.into_api(&mut (), &mut sys_ctx).await; let expr = expr.into_api(&mut (), &mut ()).await;
let pos = (text.len() - s.len()) as u32; let pos = (text.len() - s.len()) as u32;
expr_store.dispose().await; expr_store.dispose().await;
return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await; return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await;
@@ -282,42 +280,46 @@ pub fn extension_init(
writeln!(logger, "Got notified about n/a character '{trigger_char}'"); writeln!(logger, "Got notified about n/a character '{trigger_char}'");
expr_store.dispose().await; expr_store.dispose().await;
hand.handle(&lex, &None).await hand.handle(&lex, &None).await
}, })
.await,
api::HostExtReq::ParseLine(pline) => { api::HostExtReq::ParseLine(pline) => {
let api::ParseLine { module, src, exported, comments, sys, line, idx } = &pline; let api::ParseLine { module, src, exported, comments, sys, line, idx } = &pline;
let ctx = get_ctx(*sys).await; with_ctx(get_ctx(*sys).await, async {
let parsers = ctx.cted().inst().dyn_parsers(); let parsers = ctx().cted().inst().dyn_parsers();
let src = Sym::from_api(*src, ctx.i()).await; let src = Sym::from_api(*src, &i()).await;
let comments = let comments =
join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &i))).await; join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &interner)))
.await;
let expr_store = BorrowedExprStore::new(); let expr_store = BorrowedExprStore::new();
let mut from_api_ctx = (ctx.clone(), &expr_store);
let line: Vec<PTokTree> = let line: Vec<PTokTree> =
ttv_from_api(line, &mut from_api_ctx, &mut (), &src, &i).await; ttv_from_api(line, &mut &expr_store, &mut (), &src, &i()).await;
let snip = Snippet::new(line.first().expect("Empty line"), &line); let snip = Snippet::new(line.first().expect("Empty line"), &line);
let parser = parsers[*idx as usize]; let parser = parsers[*idx as usize];
let module = Sym::from_api(*module, ctx.i()).await; let module = Sym::from_api(*module, &i()).await;
let reporter = Reporter::new(); let reporter = Reporter::new();
let pctx = ParsCtx::new(ctx.clone(), module, &reporter); let pctx = ParsCtx::new(module, &reporter);
let parse_res = parser.parse(pctx, *exported, comments, snip).await; let parse_res = parser.parse(pctx, *exported, comments, snip).await;
let o_line = match reporter.merge(parse_res) { let o_line = match reporter.merge(parse_res) {
Err(e) => Err(e.to_api()), Err(e) => Err(e.to_api()),
Ok(t) => Ok(linev_into_api(t, ctx.clone()).await), Ok(t) => Ok(linev_into_api(t).await),
}; };
mem::drop(line); mem::drop(line);
expr_store.dispose().await; expr_store.dispose().await;
hand.handle(&pline, &o_line).await hand.handle(&pline, &o_line).await
})
.await
}, },
api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst(sys, id)) => { api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst(sys, id)) =>
let ctx = get_ctx(sys).await; with_ctx(get_ctx(sys).await, async move {
let cnst = get_const(id, ctx.clone()).await; let cnst = get_const(id).await;
hand.handle(fpc, &cnst.api_return(ctx).await).await hand.handle(fpc, &cnst.serialize().await).await
}, })
.await,
api::HostExtReq::AtomReq(atom_req) => { api::HostExtReq::AtomReq(atom_req) => {
let atom = atom_req.get_atom(); let atom = atom_req.get_atom();
let atom_req = atom_req.clone(); let atom_req = atom_req.clone();
with_atom_record(&get_ctx, atom, async move |nfo, ctx, id, buf| { with_atom_record(&get_ctx, atom, async move |nfo, id, buf| {
let actx = AtomCtx(buf, atom.drop, ctx.clone()); let actx = AtomCtx(buf, atom.drop);
match &atom_req { match &atom_req {
api::AtomReq::SerializeAtom(ser) => { api::AtomReq::SerializeAtom(ser) => {
let mut buf = enc_vec(&id).await; let mut buf = enc_vec(&id).await;
@@ -336,7 +338,7 @@ pub fn extension_init(
api::AtomReq::Fwded(fwded) => { api::AtomReq::Fwded(fwded) => {
let api::Fwded(_, key, payload) = &fwded; let api::Fwded(_, key, payload) = &fwded;
let mut reply = Vec::new(); let mut reply = Vec::new();
let key = Sym::from_api(*key, &i).await; let key = Sym::from_api(*key, &interner).await;
let some = nfo let some = nfo
.handle_req( .handle_req(
actx, actx,
@@ -349,18 +351,18 @@ pub fn extension_init(
}, },
api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => { api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => {
let expr_store = BorrowedExprStore::new(); let expr_store = BorrowedExprStore::new();
let expr_handle = ExprHandle::borrowed(ctx.clone(), *arg, &expr_store); let expr_handle = ExprHandle::borrowed(*arg, &expr_store);
let ret = nfo.call_ref(actx, Expr::from_handle(expr_handle.clone())).await; let ret = nfo.call_ref(actx, Expr::from_handle(expr_handle.clone())).await;
let api_expr = ret.api_return(ctx.clone()).await; let api_expr = ret.serialize().await;
mem::drop(expr_handle); mem::drop(expr_handle);
expr_store.dispose().await; expr_store.dispose().await;
hand.handle(call, &api_expr).await hand.handle(call, &api_expr).await
}, },
api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => { api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => {
let expr_store = BorrowedExprStore::new(); let expr_store = BorrowedExprStore::new();
let expr_handle = ExprHandle::borrowed(ctx.clone(), *arg, &expr_store); let expr_handle = ExprHandle::borrowed(*arg, &expr_store);
let ret = nfo.call(actx, Expr::from_handle(expr_handle.clone())).await; let ret = nfo.call(actx, Expr::from_handle(expr_handle.clone())).await;
let api_expr = ret.api_return(ctx.clone()).await; let api_expr = ret.serialize().await;
mem::drop(expr_handle); mem::drop(expr_handle);
expr_store.dispose().await; expr_store.dispose().await;
hand.handle(call, &api_expr).await hand.handle(call, &api_expr).await
@@ -370,7 +372,7 @@ pub fn extension_init(
Ok(opt) => match opt { Ok(opt) => match opt {
None => hand.handle(cmd, &Ok(api::NextStep::Halt)).await, None => hand.handle(cmd, &Ok(api::NextStep::Halt)).await,
Some(cont) => { Some(cont) => {
let cont = cont.api_return(ctx.clone()).await; let cont = cont.serialize().await;
hand.handle(cmd, &Ok(api::NextStep::Continue(cont))).await hand.handle(cmd, &Ok(api::NextStep::Continue(cont))).await
}, },
}, },
@@ -385,12 +387,12 @@ pub fn extension_init(
let ctx = get_ctx(*sys).await; let ctx = get_ctx(*sys).await;
// SAFETY: deserialization implicitly grants ownership to previously owned exprs // SAFETY: deserialization implicitly grants ownership to previously owned exprs
let refs = (refs.iter()) let refs = (refs.iter())
.map(|tk| Expr::from_handle(ExprHandle::deserialize(ctx.clone(), *tk))) .map(|tk| Expr::from_handle(ExprHandle::deserialize(*tk)))
.collect_vec(); .collect_vec();
let id = AtomTypeId::decode(Pin::new(&mut read)).await; let id = AtomTypeId::decode(Pin::new(&mut read)).await;
let inst = ctx.cted().inst(); let inst = ctx.cted().inst();
let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID"); let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID");
hand.handle(&deser, &nfo.deserialize(ctx.clone(), read, &refs).await).await hand.handle(&deser, &nfo.deserialize(read, &refs).await).await
}, },
} }
} }
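The structural change running through this handler: `SysCtx` is no longer threaded as a parameter; each request future is wrapped in `with_ctx(...)` and the ambient context is read back with `ctx()` / `i()` from `crate::context`. A sketch of the pattern in isolation, assuming `with_ctx` returns its future's output as its uses above suggest:

async fn example_request(sys_ctx: SysCtx) -> api::SysId {
  with_ctx(sys_ctx, async move {
    // Anywhere inside this future the context is ambient:
    let _interner = i(); // shared interner, previously reached via `ctx.i()`
    ctx().sys_id()       // the system this request is being served for
  })
  .await
}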

View File

@@ -13,8 +13,8 @@ use orchid_base::reqnot::Requester;
use crate::api; use crate::api;
use crate::atom::ForeignAtom; use crate::atom::ForeignAtom;
use crate::context::{ctx, i};
use crate::gen_expr::{GExpr, GExprKind}; use crate::gen_expr::{GExpr, GExprKind};
use crate::system::SysCtx;
pub struct BorrowedExprStore(RefCell<Option<HashSet<Rc<ExprHandle>>>>); pub struct BorrowedExprStore(RefCell<Option<HashSet<Rc<ExprHandle>>>>);
impl BorrowedExprStore { impl BorrowedExprStore {
@@ -22,7 +22,7 @@ impl BorrowedExprStore {
pub async fn dispose(self) { pub async fn dispose(self) {
let elements = self.0.borrow_mut().take().unwrap(); let elements = self.0.borrow_mut().take().unwrap();
for handle in elements { for handle in elements {
handle.drop_one().await handle.on_borrow_expire().await
} }
} }
} }
@@ -34,64 +34,67 @@ impl Drop for BorrowedExprStore {
} }
} }
#[derive(destructure)] #[derive(destructure, PartialEq, Eq, Hash)]
pub struct ExprHandle { pub struct ExprHandle(api::ExprTicket);
pub tk: api::ExprTicket,
pub ctx: SysCtx,
}
impl ExprHandle { impl ExprHandle {
/// This function does not signal to take ownership of the expr. /// Do not signal to take ownership of the expr. Instead, the
pub fn borrowed(ctx: SysCtx, tk: api::ExprTicket, store: &BorrowedExprStore) -> Rc<Self> { /// [BorrowedExprStore] signifies the lifetime of the borrow, and when it is
let this = Rc::new(Self { ctx, tk }); /// freed, it signals to take ownership of any exprs that ended up outliving
/// it. It is used to receive exprs sent via [ExprHandle::ticket] as an
/// optimization over [ExprHandle::from_ticket]
pub fn borrowed(tk: api::ExprTicket, store: &BorrowedExprStore) -> Rc<Self> {
let this = Rc::new(Self(tk));
store.0.borrow_mut().as_mut().unwrap().insert(this.clone()); store.0.borrow_mut().as_mut().unwrap().insert(this.clone());
this this
} }
pub fn deserialize(ctx: SysCtx, tk: api::ExprTicket) -> Rc<Self> { Rc::new(Self { ctx, tk }) } /// This function takes over the loose reference pre-created via
pub fn get_ctx(&self) -> SysCtx { self.ctx.clone() } /// [ExprHandle::serialize] in the sender. It must therefore pair up with a
/// Drop one instance of the handle silently; if it's the last one, do /// corresponding call to that function.
/// nothing, otherwise send an Acquire pub fn deserialize(tk: api::ExprTicket) -> Rc<Self> { Rc::new(Self(tk)) }
pub async fn drop_one(self: Rc<Self>) { /// This function takes ownership of a borrowed expr sent via
match Rc::try_unwrap(self) { /// [ExprHandle::ticket] and signals immediately to record that ownership. It
Err(rc) => { /// is used in place of [ExprHandle::borrowed] when it's impractical to
eprintln!("Extending lifetime for {:?}", rc.tk); /// determine how long the borrow will live.
rc.ctx.reqnot().notify(api::Acquire(rc.ctx.sys_id(), rc.tk)).await ///
}, /// # Safety
Ok(hand) => { ///
// avoid calling destructor /// You need to ensure that the [api::Acquire] sent by this function arrives
hand.destructure(); /// before the borrow expires, so you still need a borrow delimited by some
}, /// message you will send in the future.
} pub async fn from_ticket(tk: api::ExprTicket) -> Rc<Self> {
let store = BorrowedExprStore::new();
let expr = Self::borrowed(tk, &store);
store.dispose().await;
expr
} }
/// The raw ticket used in messages. If you want to transfer ownership via the
/// ticket, you should use [ExprHandle::serialize]. Only send this if you want
/// to lend the expr, and you expect the receiver to use
/// [ExprHandle::borrowed] or [ExprHandle::from_ticket]
pub fn ticket(&self) -> api::ExprTicket { self.0 }
async fn send_acq(&self) { ctx().reqnot().notify(api::Acquire(ctx().sys_id(), self.0)).await }
/// If this is the last reference, do nothing; otherwise send an Acquire
pub async fn on_borrow_expire(self: Rc<Self>) { self.serialize().await; }
/// Drop the handle and get the ticket without a release notification. /// Drop the handle and get the ticket without a release notification.
/// Use this with messages that imply ownership transfer. This function is /// Use this with messages that imply ownership transfer. This function is
/// safe because abusing it is a memory leak. /// safe because abusing it is a memory leak.
pub fn serialize(self) -> api::ExprTicket { pub async fn serialize(self: Rc<Self>) -> api::ExprTicket {
eprintln!("Skipping destructor for {:?}", self.tk); match Rc::try_unwrap(self) {
self.destructure().0 Err(rc) => {
rc.send_acq().await;
rc.0
},
Ok(hand) => hand.destructure().0,
} }
}
impl Eq for ExprHandle {}
impl PartialEq for ExprHandle {
fn eq(&self, other: &Self) -> bool {
self.ctx.sys_id() == other.ctx.sys_id() && self.tk == other.tk
}
}
impl Hash for ExprHandle {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.ctx.sys_id().hash(state);
self.tk.hash(state);
} }
} }
impl fmt::Debug for ExprHandle { impl fmt::Debug for ExprHandle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ExprHandle({})", self.0.0) }
write!(f, "ExprHandle({})", self.tk.0)
}
} }
impl Drop for ExprHandle { impl Drop for ExprHandle {
fn drop(&mut self) { fn drop(&mut self) {
let notif = api::Release(self.ctx.sys_id(), self.tk); let notif = api::Release(ctx().sys_id(), self.0);
let reqnot = self.ctx.reqnot().clone(); ctx().spawn(async move { ctx().reqnot().clone().notify(notif).await })
self.ctx.spawner()(Box::pin(async move { reqnot.notify(notif).await }))
} }
} }
@@ -102,19 +105,23 @@ pub struct Expr {
} }
impl Expr { impl Expr {
pub fn from_handle(handle: Rc<ExprHandle>) -> Self { Self { handle, data: Rc::default() } } pub fn from_handle(handle: Rc<ExprHandle>) -> Self { Self { handle, data: Rc::default() } }
pub fn new(handle: Rc<ExprHandle>, d: ExprData) -> Self { pub fn from_data(handle: Rc<ExprHandle>, d: ExprData) -> Self {
Self { handle, data: Rc::new(OnceCell::from(d)) } Self { handle, data: Rc::new(OnceCell::from(d)) }
} }
/// Creates an instance without incrementing the reference count. This is
/// only safe to be called on a reference created with an [Expr::serialize]
/// call which created the loose reference it can take ownership of.
pub async fn deserialize(tk: api::ExprTicket) -> Self {
Self::from_handle(ExprHandle::deserialize(tk))
}
pub async fn data(&self) -> &ExprData { pub async fn data(&self) -> &ExprData {
(self.data.get_or_init(async { (self.data.get_or_init(async {
let details = self.handle.ctx.reqnot().request(api::Inspect { target: self.handle.tk }).await; let details = ctx().reqnot().request(api::Inspect { target: self.handle.ticket() }).await;
let pos = Pos::from_api(&details.location, self.handle.ctx.i()).await; let pos = Pos::from_api(&details.location, &i()).await;
let kind = match details.kind { let kind = match details.kind {
api::InspectedKind::Atom(a) => api::InspectedKind::Atom(a) =>
ExprKind::Atom(ForeignAtom::new(self.handle.clone(), a, pos.clone())), ExprKind::Atom(ForeignAtom::new(self.handle.clone(), a, pos.clone())),
api::InspectedKind::Bottom(b) => api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b, &i()).await),
ExprKind::Bottom(OrcErrv::from_api(&b, self.handle.ctx.i()).await),
api::InspectedKind::Opaque => ExprKind::Opaque, api::InspectedKind::Opaque => ExprKind::Opaque,
}; };
ExprData { pos, kind } ExprData { pos, kind }
@@ -128,20 +135,22 @@ impl Expr {
} }
} }
pub fn handle(&self) -> Rc<ExprHandle> { self.handle.clone() } pub fn handle(&self) -> Rc<ExprHandle> { self.handle.clone() }
pub fn ctx(&self) -> SysCtx { self.handle.ctx.clone() }
pub fn slot(&self) -> GExpr { pub fn slot(&self) -> GExpr {
GExpr { pos: Pos::SlotTarget, kind: GExprKind::Slot(self.clone()) } GExpr { pos: Pos::SlotTarget, kind: GExprKind::Slot(self.clone()) }
} }
/// Increments the refcount to ensure that the ticket remains valid even if
/// the handle is freed. To avoid a leak, [Expr::deserialize] must eventually
/// be called.
pub async fn serialize(self) -> api::ExprTicket { self.handle.serialize().await }
} }
impl Format for Expr { impl Format for Expr {
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
match &self.data().await.kind { match &self.data().await.kind {
ExprKind::Opaque => "OPAQUE".to_string().into(), ExprKind::Opaque => "OPAQUE".to_string().into(),
ExprKind::Bottom(b) => format!("Bottom({b})").into(), ExprKind::Bottom(b) => format!("Bottom({b})").into(),
ExprKind::Atom(a) => FmtUnit::from_api( ExprKind::Atom(a) =>
&self.handle.ctx.reqnot().request(api::ExtAtomPrint(a.atom.clone())).await, FmtUnit::from_api(&ctx().reqnot().request(api::ExtAtomPrint(a.atom.clone())).await),
),
} }
} }
} }
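The rewritten handle drops the embedded `SysCtx` and documents a strict pairing for ownership transfer: `serialize` deliberately leaves one reference dangling (or sends `Acquire`), and `deserialize` on the receiving side adopts it. A sketch of the pairing, assuming only the `Expr` methods shown above; no transport is shown:

async fn transfer(expr: Expr) -> Expr {
  // Sender: hand out the ticket without a Release notification. One reference
  // is intentionally leaked for the receiver.
  let tk: api::ExprTicket = expr.serialize().await;
  // Receiver: adopt the leaked reference. Every serialize() must eventually be
  // matched by exactly one deserialize(), otherwise the expr leaks on the host.
  Expr::deserialize(tk).await
}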

View File

@@ -19,11 +19,11 @@ use trait_set::trait_set;
use crate::atom::Atomic; use crate::atom::Atomic;
use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant}; use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use crate::context::{SysCtxEntry, ctx, i};
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::coroutine_exec::{ExecHandle, exec}; use crate::coroutine_exec::{ExecHandle, exec};
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
use crate::system::{SysCtx, SysCtxEntry};
trait_set! { trait_set! {
trait FunCB = Fn(Vec<Expr>) -> LocalBoxFuture<'static, OrcRes<GExpr>> + 'static; trait FunCB = Fn(Vec<Expr>) -> LocalBoxFuture<'static, OrcRes<GExpr>> + 'static;
@@ -43,14 +43,11 @@ struct FunRecord {
fun: Rc<dyn FunCB>, fun: Rc<dyn FunCB>,
} }
async fn process_args<I, O, F: ExprFunc<I, O>>( fn process_args<I, O, F: ExprFunc<I, O>>(f: F) -> FunRecord {
debug: impl AsRef<str> + Clone + 'static,
f: F,
) -> FunRecord {
let argtyps = F::argtyps(); let argtyps = F::argtyps();
let fun = Rc::new(move |v: Vec<Expr>| { let fun = Rc::new(move |v: Vec<Expr>| {
clone!(f, v mut); clone!(f, v mut);
exec(debug.clone(), async move |mut hand| { exec(async move |mut hand| {
let mut norm_args = Vec::with_capacity(v.len()); let mut norm_args = Vec::with_capacity(v.len());
for (expr, typ) in v.into_iter().zip(argtyps) { for (expr, typ) in v.into_iter().zip(argtyps) {
if *typ != TypeId::of::<Expr>() { if *typ != TypeId::of::<Expr>() {
@@ -77,13 +74,14 @@ pub(crate) struct Fun {
record: FunRecord, record: FunRecord,
} }
impl Fun { impl Fun {
pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, ctx: SysCtx, f: F) -> Self { pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, f: F) -> Self {
let ctx = ctx();
let funs: &FunsCtx = ctx.get_or_default(); let funs: &FunsCtx = ctx.get_or_default();
let mut fung = funs.0.lock().await; let mut fung = funs.0.lock().await;
let record = if let Some(record) = fung.get(&path) { let record = if let Some(record) = fung.get(&path) {
record.clone() record.clone()
} else { } else {
let record = process_args(path.to_string(), f).await; let record = process_args(f);
fung.insert(path.clone(), record.clone()); fung.insert(path.clone(), record.clone());
record record
}; };
@@ -101,20 +99,19 @@ impl OwnedAtom for Fun {
async fn call_ref(&self, arg: Expr) -> GExpr { async fn call_ref(&self, arg: Expr) -> GExpr {
let new_args = self.args.iter().cloned().chain([arg]).collect_vec(); let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
if new_args.len() == self.record.argtyps.len() { if new_args.len() == self.record.argtyps.len() {
(self.record.fun)(new_args).await.to_expr().await (self.record.fun)(new_args).await.to_gen().await
} else { } else {
Self { args: new_args, record: self.record.clone(), path: self.path.clone() }.to_expr().await Self { args: new_args, record: self.record.clone(), path: self.path.clone() }.to_gen().await
} }
} }
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await } async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs { async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.path.to_api().encode(write).await; self.path.to_api().encode(write).await;
self.args.clone() self.args.clone()
} }
async fn deserialize(mut ctx: impl DeserializeCtx, args: Self::Refs) -> Self { async fn deserialize(mut ds_cx: impl DeserializeCtx, args: Self::Refs) -> Self {
let sys = ctx.sys(); let path = Sym::from_api(ds_cx.decode().await, &i()).await;
let path = Sym::from_api(ctx.decode().await, sys.i()).await; let record = (ctx().get::<FunsCtx>().0.lock().await.get(&path))
let record = (sys.get::<FunsCtx>().0.lock().await.get(&path))
.expect("Function missing during deserialization") .expect("Function missing during deserialization")
.clone(); .clone();
Self { args, path, record } Self { args, path, record }
@@ -134,11 +131,8 @@ pub struct Lambda {
record: FunRecord, record: FunRecord,
} }
impl Lambda { impl Lambda {
pub async fn new<I, O, F: ExprFunc<I, O>>( pub fn new<I, O, F: ExprFunc<I, O>>(f: F) -> Self {
debug: impl AsRef<str> + Clone + 'static, Self { args: vec![], record: process_args(f) }
f: F,
) -> Self {
Self { args: vec![], record: process_args(debug, f).await }
} }
} }
impl Atomic for Lambda { impl Atomic for Lambda {
@@ -151,9 +145,9 @@ impl OwnedAtom for Lambda {
async fn call_ref(&self, arg: Expr) -> GExpr { async fn call_ref(&self, arg: Expr) -> GExpr {
let new_args = self.args.iter().cloned().chain([arg]).collect_vec(); let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
if new_args.len() == self.record.argtyps.len() { if new_args.len() == self.record.argtyps.len() {
(self.record.fun)(new_args).await.to_expr().await (self.record.fun)(new_args).await.to_gen().await
} else { } else {
Self { args: new_args, record: self.record.clone() }.to_expr().await Self { args: new_args, record: self.record.clone() }.to_gen().await
} }
} }
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await } async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
@@ -176,7 +170,7 @@ mod expr_func_derives {
impl< impl<
$($t: TryFromExpr + 'static, )* $($t: TryFromExpr + 'static, )*
Out: ToExpr, Out: ToExpr,
Func: AsyncFn($($t,)*) -> Out + Clone + Send + Sync + 'static Func: AsyncFn($($t,)*) -> Out + Clone + 'static
> ExprFunc<($($t,)*), Out> for Func { > ExprFunc<($($t,)*), Out> for Func {
fn argtyps() -> &'static [TypeId] { fn argtyps() -> &'static [TypeId] {
static STORE: OnceLock<Vec<TypeId>> = OnceLock::new(); static STORE: OnceLock<Vec<TypeId>> = OnceLock::new();
@@ -185,7 +179,7 @@ mod expr_func_derives {
async fn apply<'a>(&self, _: ExecHandle<'a>, v: Vec<Expr>) -> OrcRes<GExpr> { async fn apply<'a>(&self, _: ExecHandle<'a>, v: Vec<Expr>) -> OrcRes<GExpr> {
assert_eq!(v.len(), Self::argtyps().len(), "Arity mismatch"); assert_eq!(v.len(), Self::argtyps().len(), "Arity mismatch");
let [$([< $t:lower >],)*] = v.try_into().unwrap_or_else(|_| panic!("Checked above")); let [$([< $t:lower >],)*] = v.try_into().unwrap_or_else(|_| panic!("Checked above"));
Ok(self($($t::try_from_expr([< $t:lower >]).await?,)*).await.to_expr().await) Ok(self($($t::try_from_expr([< $t:lower >]).await?,)*).await.to_gen().await)
} }
} }
} }
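`process_args` and `Lambda::new` are now synchronous and take no debug label; any `AsyncFn` whose arguments implement `TryFromExpr` and whose return implements `ToExpr` still qualifies as an `ExprFunc`. A sketch, assuming `Expr` arguments are accepted as-is (the `TypeId::of::<Expr>()` special case above suggests they are):

fn flip() -> Lambda {
  // Two-argument lambda that applies its second argument to its first.
  Lambda::new(async |a: Expr, b: Expr| call(b.slot(), [a.slot()]))
}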

View File

@@ -6,12 +6,13 @@ use orchid_base::error::{OrcErr, OrcErrv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::Requester;
use orchid_base::{match_mapping, tl_cache}; use orchid_base::{match_mapping, tl_cache};
use crate::api; use crate::api;
use crate::atom::{AtomFactory, ToAtom}; use crate::atom::{AtomFactory, ToAtom};
use crate::context::ctx;
use crate::expr::Expr; use crate::expr::Expr;
use crate::system::SysCtx;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct GExpr { pub struct GExpr {
@@ -19,29 +20,32 @@ pub struct GExpr {
pub pos: Pos, pub pos: Pos,
} }
impl GExpr { impl GExpr {
pub async fn api_return(self, ctx: SysCtx) -> api::Expression { /// Release notifications will not be sent for the slots. Use this with
/// messages that imply ownership transfer
pub async fn serialize(self) -> api::Expression {
if let GExprKind::Slot(ex) = self.kind { if let GExprKind::Slot(ex) = self.kind {
let hand = ex.handle(); let hand = ex.handle();
mem::drop(ex); mem::drop(ex);
api::Expression { api::Expression {
location: api::Location::SlotTarget, location: api::Location::SlotTarget,
kind: match Rc::try_unwrap(hand) { // an instance is leaked here, we must take ownership of it when we receive this
Ok(h) => api::ExpressionKind::Slot { tk: h.serialize(), by_value: true }, kind: api::ExpressionKind::Slot(hand.serialize().await),
Err(rc) => api::ExpressionKind::Slot { tk: rc.tk, by_value: false },
},
} }
} else { } else {
api::Expression { api::Expression {
location: api::Location::Inherit, location: api::Location::Inherit,
kind: self.kind.api_return(ctx).boxed_local().await, kind: self.kind.serialize().boxed_local().await,
} }
} }
} }
pub fn at(self, pos: Pos) -> Self { GExpr { pos, kind: self.kind } } pub fn at(self, pos: Pos) -> Self { GExpr { pos, kind: self.kind } }
pub async fn create(self) -> Expr {
Expr::deserialize(ctx().reqnot().request(api::Create(self.serialize().await)).await).await
}
} }
impl Format for GExpr { impl Format for GExpr {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
self.kind.print(c).await self.kind.print(c).boxed_local().await
} }
} }
@@ -57,21 +61,21 @@ pub enum GExprKind {
Bottom(OrcErrv), Bottom(OrcErrv),
} }
impl GExprKind { impl GExprKind {
pub async fn api_return(self, ctx: SysCtx) -> api::ExpressionKind { pub async fn serialize(self) -> api::ExpressionKind {
match_mapping!(self, Self => api::ExpressionKind { match_mapping!(self, Self => api::ExpressionKind {
Call( Call(
f => Box::new(f.api_return(ctx.clone()).await), f => Box::new(f.serialize().await),
x => Box::new(x.api_return(ctx).await) x => Box::new(x.serialize().await)
), ),
Seq( Seq(
a => Box::new(a.api_return(ctx.clone()).await), a => Box::new(a.serialize().await),
b => Box::new(b.api_return(ctx).await) b => Box::new(b.serialize().await)
), ),
Lambda(arg, body => Box::new(body.api_return(ctx).await)), Lambda(arg, body => Box::new(body.serialize().await)),
Arg(arg), Arg(arg),
Const(name.to_api()), Const(name.to_api()),
Bottom(err.to_api()), Bottom(err.to_api()),
NewAtom(fac.clone().build(ctx).await), NewAtom(fac.clone().build().await),
} { } {
Self::Slot(_) => panic!("processed elsewhere") Self::Slot(_) => panic!("processed elsewhere")
}) })
@@ -105,7 +109,7 @@ fn inherit(kind: GExprKind) -> GExpr { GExpr { pos: Pos::Inherit, kind } }
pub fn sym_ref(path: Sym) -> GExpr { inherit(GExprKind::Const(path)) } pub fn sym_ref(path: Sym) -> GExpr { inherit(GExprKind::Const(path)) }
pub fn atom<A: ToAtom>(atom: A) -> GExpr { inherit(GExprKind::NewAtom(atom.to_atom_factory())) } pub fn atom<A: ToAtom>(atom: A) -> GExpr { inherit(GExprKind::NewAtom(atom.to_atom_factory())) }
pub fn seq(ops: impl IntoIterator<Item = GExpr>) -> GExpr { pub fn seq(deps: impl IntoIterator<Item = GExpr>, val: GExpr) -> GExpr {
fn recur(mut ops: impl Iterator<Item = GExpr>) -> Option<GExpr> { fn recur(mut ops: impl Iterator<Item = GExpr>) -> Option<GExpr> {
let op = ops.next()?; let op = ops.next()?;
Some(match recur(ops) { Some(match recur(ops) {
@@ -113,19 +117,15 @@ pub fn seq(ops: impl IntoIterator<Item = GExpr>) -> GExpr {
Some(rec) => inherit(GExprKind::Seq(Box::new(op), Box::new(rec))), Some(rec) => inherit(GExprKind::Seq(Box::new(op), Box::new(rec))),
}) })
} }
recur(ops.into_iter()).expect("Empty list provided to seq!") recur(deps.into_iter().chain([val])).expect("Empty list provided to seq!")
} }
pub fn arg(n: u64) -> GExpr { inherit(GExprKind::Arg(n)) } pub fn arg(n: u64) -> GExpr { inherit(GExprKind::Arg(n)) }
pub fn lambda(n: u64, b: impl IntoIterator<Item = GExpr>) -> GExpr { pub fn lambda(n: u64, [b]: [GExpr; 1]) -> GExpr { inherit(GExprKind::Lambda(n, Box::new(b))) }
inherit(GExprKind::Lambda(n, Box::new(call(b))))
}
pub fn call(v: impl IntoIterator<Item = GExpr>) -> GExpr { pub fn call(f: GExpr, argv: impl IntoIterator<Item = GExpr>) -> GExpr {
v.into_iter() (argv.into_iter()).fold(f, |f, x| inherit(GExprKind::Call(Box::new(f), Box::new(x))))
.reduce(|f, x| inherit(GExprKind::Call(Box::new(f), Box::new(x))))
.expect("Empty call expression")
} }
pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> GExpr { pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> GExpr {
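The constructor signatures are tightened above: `call` takes the callee separately from its arguments, `lambda` takes exactly one body expression, and `seq` takes a list of dependencies plus the final value it resolves to. A small sketch combining them; all inputs are arbitrary placeholder `GExpr`s:

fn identity() -> GExpr { lambda(0, [arg(0)]) }

fn apply_after(dep: GExpr, f: GExpr, x: GExpr) -> GExpr {
  // Evaluate `dep` for its effect, then reduce to `f` applied to `x`.
  seq([dep], call(f, [x]))
}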

View File

@@ -12,38 +12,48 @@ use orchid_base::parse::ParseCtx;
use orchid_base::reqnot::Requester; use orchid_base::reqnot::Requester;
use crate::api; use crate::api;
use crate::context::{ctx, i};
use crate::expr::BorrowedExprStore; use crate::expr::BorrowedExprStore;
use crate::parser::PTokTree; use crate::parser::PTokTree;
use crate::system::SysCtx;
use crate::tree::GenTokTree; use crate::tree::GenTokTree;
pub async fn ekey_cascade(i: &Interner) -> Tok<String> { pub async fn ekey_cascade() -> Tok<String> {
i.i("An error cascading from a recursive call").await i().i("An error cascading from a recursive call").await
} }
pub async fn ekey_not_applicable(i: &Interner) -> Tok<String> { pub async fn ekey_not_applicable() -> Tok<String> {
i.i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await i().i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await
} }
const MSG_INTERNAL_ERROR: &str = "This error is a sentinel for the extension library.\ const MSG_INTERNAL_ERROR: &str = "This error is a sentinel for the extension library.\
it should not be emitted by the extension."; it should not be emitted by the extension.";
pub async fn err_cascade(i: &Interner) -> OrcErrv { pub async fn err_cascade() -> OrcErrv {
mk_errv(ekey_cascade(i).await, MSG_INTERNAL_ERROR, [Pos::None]) mk_errv(ekey_cascade().await, MSG_INTERNAL_ERROR, [Pos::None])
} }
pub async fn err_not_applicable(i: &Interner) -> OrcErrv { pub async fn err_not_applicable() -> OrcErrv {
mk_errv(ekey_not_applicable(i).await, MSG_INTERNAL_ERROR, [Pos::None]) mk_errv(ekey_not_applicable().await, MSG_INTERNAL_ERROR, [Pos::None])
} }
pub struct LexContext<'a> { pub struct LexContext<'a> {
pub(crate) exprs: &'a BorrowedExprStore, pub(crate) exprs: &'a BorrowedExprStore,
pub ctx: SysCtx,
pub text: &'a Tok<String>, pub text: &'a Tok<String>,
pub id: api::ParsId, pub id: api::ParsId,
pub pos: u32, pub pos: u32,
i: Interner,
pub(crate) src: Sym, pub(crate) src: Sym,
pub(crate) rep: &'a Reporter, pub(crate) rep: &'a Reporter,
} }
impl<'a> LexContext<'a> { impl<'a> LexContext<'a> {
pub fn new(
exprs: &'a BorrowedExprStore,
text: &'a Tok<String>,
id: api::ParsId,
pos: u32,
src: Sym,
rep: &'a Reporter,
) -> Self {
Self { exprs, i: i(), id, pos, rep, src, text }
}
pub fn src(&self) -> &Sym { &self.src } pub fn src(&self) -> &Sym { &self.src }
/// This function returns [PTokTree] because it can never return /// This function returns [PTokTree] because it can never return
/// [orchid_base::tree::Token::NewExpr]. You can use /// [orchid_base::tree::Token::NewExpr]. You can use
@@ -51,17 +61,10 @@ impl<'a> LexContext<'a> {
/// for embedding in the return value. /// for embedding in the return value.
pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, PTokTree)> { pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, PTokTree)> {
let start = self.pos(tail); let start = self.pos(tail);
let Some(lx) = self.ctx.reqnot().request(api::SubLex { pos: start, id: self.id }).await else { let Some(lx) = ctx().reqnot().request(api::SubLex { pos: start, id: self.id }).await else {
return Err(err_cascade(self.ctx.i()).await); return Err(err_cascade().await);
}; };
let tree = PTokTree::from_api( let tree = PTokTree::from_api(&lx.tree, &mut { self.exprs }, &mut (), &self.src, &i()).await;
&lx.tree,
&mut (self.ctx.clone(), self.exprs),
&mut (),
&self.src,
self.ctx.i(),
)
.await;
Ok((&self.text[lx.pos as usize..], tree)) Ok((&self.text[lx.pos as usize..], tree))
} }
@@ -75,7 +78,7 @@ impl<'a> LexContext<'a> {
} }
} }
impl ParseCtx for LexContext<'_> { impl ParseCtx for LexContext<'_> {
fn i(&self) -> &Interner { self.ctx.i() } fn i(&self) -> &Interner { &self.i }
fn rep(&self) -> &Reporter { self.rep } fn rep(&self) -> &Reporter { self.rep }
} }
@@ -83,7 +86,7 @@ pub trait Lexer: Send + Sync + Sized + Default + 'static {
const CHAR_FILTER: &'static [RangeInclusive<char>]; const CHAR_FILTER: &'static [RangeInclusive<char>];
fn lex<'a>( fn lex<'a>(
tail: &'a str, tail: &'a str,
ctx: &'a LexContext<'a>, lctx: &'a LexContext<'a>,
) -> impl Future<Output = OrcRes<(&'a str, GenTokTree)>>; ) -> impl Future<Output = OrcRes<(&'a str, GenTokTree)>>;
} }
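With `LexContext::new` bundling the interner and the context-free error constructors above, a lexer body typically bails with the "not applicable" pseudo-error when its trigger doesn't match and delegates nested input back to the host through `recurse`. A sketch of that shape; the `'%'` trigger is a placeholder, and turning the subtree into a `GenTokTree` is elided because those helpers aren't part of this hunk:

async fn lex_percent<'a>(
  tail: &'a str,
  lctx: &'a LexContext<'a>,
) -> OrcRes<(&'a str, PTokTree)> {
  if !tail.starts_with('%') {
    // Tells the dispatcher this branch doesn't apply; the next lexer whose
    // char filter matched is tried instead of reporting an error.
    return Err(err_not_applicable().await);
  }
  // Hand the rest of the line back to the host and receive the already-parsed
  // subtree plus the remaining input.
  lctx.recurse(&tail[1..]).await
}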

View File

@@ -11,6 +11,7 @@ pub mod func_atom;
pub mod gen_expr; pub mod gen_expr;
pub mod lexer; pub mod lexer;
// pub mod msg; // pub mod msg;
pub mod context;
pub mod other_system; pub mod other_system;
pub mod parser; pub mod parser;
pub mod reflection; pub mod reflection;

View File

@@ -1,15 +1,12 @@
use std::marker::PhantomData;
use std::mem::size_of;
use crate::api; use crate::api;
use crate::system::{DynSystemCard, SystemCard}; use crate::system::{DynSystemCard, SystemCard};
pub struct SystemHandle<C: SystemCard> { pub struct SystemHandle<C: SystemCard> {
pub(crate) _card: PhantomData<C>, pub(crate) card: C,
pub(crate) id: api::SysId, pub(crate) id: api::SysId,
} }
impl<C: SystemCard> SystemHandle<C> { impl<C: SystemCard> SystemHandle<C> {
pub(crate) fn new(id: api::SysId) -> Self { Self { _card: PhantomData, id } } pub(crate) fn new(id: api::SysId) -> Self { Self { card: C::default(), id } }
pub fn id(&self) -> api::SysId { self.id } pub fn id(&self) -> api::SysId { self.id }
} }
impl<C: SystemCard> Clone for SystemHandle<C> { impl<C: SystemCard> Clone for SystemHandle<C> {
@@ -21,16 +18,7 @@ pub trait DynSystemHandle {
fn get_card(&self) -> &dyn DynSystemCard; fn get_card(&self) -> &dyn DynSystemCard;
} }
pub fn leak_card<T: Default>() -> &'static T {
const {
if 0 != size_of::<T>() {
panic!("Attempted to leak positively sized Card. Card types must always be zero-sized");
}
}
Box::leak(Box::default())
}
impl<C: SystemCard> DynSystemHandle for SystemHandle<C> { impl<C: SystemCard> DynSystemHandle for SystemHandle<C> {
fn id(&self) -> api::SysId { self.id } fn id(&self) -> api::SysId { self.id }
fn get_card(&self) -> &'static dyn DynSystemCard { leak_card::<C>() } fn get_card(&self) -> &dyn DynSystemCard { &self.card }
} }

View File

@@ -16,10 +16,10 @@ use orchid_base::reqnot::Requester;
use orchid_base::tree::{TokTree, Token, ttv_into_api}; use orchid_base::tree::{TokTree, Token, ttv_into_api};
use crate::api; use crate::api;
use crate::context::{SysCtxEntry, ctx, i};
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
use crate::system::{SysCtx, SysCtxEntry};
use crate::tree::{GenTok, GenTokTree}; use crate::tree::{GenTok, GenTokTree};
pub type PTok = Token<Expr, Never>; pub type PTok = Token<Expr, Never>;
@@ -81,19 +81,18 @@ pub type ParserObj = &'static dyn DynParser;
pub struct ParsCtx<'a> { pub struct ParsCtx<'a> {
_parse: PhantomData<&'a mut ()>, _parse: PhantomData<&'a mut ()>,
ctx: SysCtx,
module: Sym, module: Sym,
reporter: &'a Reporter, reporter: &'a Reporter,
i: Interner,
} }
impl<'a> ParsCtx<'a> { impl<'a> ParsCtx<'a> {
pub(crate) fn new(ctx: SysCtx, module: Sym, reporter: &'a Reporter) -> Self { pub(crate) fn new(module: Sym, reporter: &'a Reporter) -> Self {
Self { _parse: PhantomData, ctx, module, reporter } Self { _parse: PhantomData, module, reporter, i: i() }
} }
pub fn ctx(&self) -> &SysCtx { &self.ctx }
pub fn module(&self) -> Sym { self.module.clone() } pub fn module(&self) -> Sym { self.module.clone() }
} }
impl ParseCtx for ParsCtx<'_> { impl ParseCtx for ParsCtx<'_> {
fn i(&self) -> &Interner { self.ctx.i() } fn i(&self) -> &Interner { &self.i }
fn rep(&self) -> &Reporter { self.reporter } fn rep(&self) -> &Reporter { self.reporter }
} }
@@ -118,7 +117,7 @@ impl ParsedLine {
name: Tok<String>, name: Tok<String>,
f: F, f: F,
) -> Self { ) -> Self {
let cb = Box::new(|ctx| async move { f(ctx).await.to_expr().await }.boxed_local()); let cb = Box::new(|ctx| async move { f(ctx).await.to_gen().await }.boxed_local());
let kind = ParsedLineKind::Mem(ParsedMem { name, exported, kind: ParsedMemKind::Const(cb) }); let kind = ParsedLineKind::Mem(ParsedMem { name, exported, kind: ParsedMemKind::Const(cb) });
let comments = comments.into_iter().cloned().collect(); let comments = comments.into_iter().cloned().collect();
ParsedLine { comments, sr: sr.clone(), kind } ParsedLine { comments, sr: sr.clone(), kind }
@@ -136,7 +135,7 @@ impl ParsedLine {
let comments = comments.into_iter().cloned().collect(); let comments = comments.into_iter().cloned().collect();
ParsedLine { comments, sr: sr.clone(), kind: line_kind } ParsedLine { comments, sr: sr.clone(), kind: line_kind }
} }
pub async fn into_api(self, mut ctx: SysCtx) -> api::ParsedLine { pub async fn into_api(self) -> api::ParsedLine {
api::ParsedLine { api::ParsedLine {
comments: self.comments.into_iter().map(|c| c.to_api()).collect(), comments: self.comments.into_iter().map(|c| c.to_api()).collect(),
source_range: self.sr.to_api(), source_range: self.sr.to_api(),
@@ -146,23 +145,23 @@ impl ParsedLine {
exported: mem.exported, exported: mem.exported,
kind: match mem.kind { kind: match mem.kind {
ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId( ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
ctx.get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(), ctx().get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
)), )),
ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module { ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module {
lines: linev_into_api(lines, ctx).boxed_local().await, lines: linev_into_api(lines).boxed_local().await,
use_prelude, use_prelude,
}, },
}, },
}), }),
ParsedLineKind::Rec(tv) => ParsedLineKind::Rec(tv) =>
api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut ctx).await), api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut ()).await),
}, },
} }
} }
} }
pub(crate) async fn linev_into_api(v: Vec<ParsedLine>, ctx: SysCtx) -> Vec<api::ParsedLine> { pub(crate) async fn linev_into_api(v: Vec<ParsedLine>) -> Vec<api::ParsedLine> {
join_all(v.into_iter().map(|l| l.into_api(ctx.clone()))).await join_all(v.into_iter().map(|l| l.into_api())).await
} }
pub enum ParsedLineKind { pub enum ParsedLineKind {
@@ -183,26 +182,23 @@ pub enum ParsedMemKind {
#[derive(Clone)] #[derive(Clone)]
pub struct ConstCtx { pub struct ConstCtx {
ctx: SysCtx,
constid: api::ParsedConstId, constid: api::ParsedConstId,
} }
impl ConstCtx { impl ConstCtx {
pub fn ctx(&self) -> &SysCtx { &self.ctx }
pub fn i(&self) -> &Interner { self.ctx.i() }
pub fn names<'b>( pub fn names<'b>(
&'b self, &'b self,
names: impl IntoIterator<Item = &'b Sym> + 'b, names: impl IntoIterator<Item = &'b Sym> + 'b,
) -> impl Stream<Item = OrcRes<Sym>> + 'b { ) -> impl Stream<Item = OrcRes<Sym>> + 'b {
let resolve_names = api::ResolveNames { let resolve_names = api::ResolveNames {
constid: self.constid, constid: self.constid,
sys: self.ctx.sys_id(), sys: ctx().sys_id(),
names: names.into_iter().map(|n| n.to_api()).collect_vec(), names: names.into_iter().map(|n| n.to_api()).collect_vec(),
}; };
stream(async |mut cx| { stream(async |mut cx| {
for name_opt in self.ctx.reqnot().request(resolve_names).await { for name_opt in ctx().reqnot().request(resolve_names).await {
cx.emit(match name_opt { cx.emit(match name_opt {
Err(e) => Err(OrcErrv::from_api(&e, self.ctx.i()).await), Err(e) => Err(OrcErrv::from_api(&e, &i()).await),
Ok(name) => Ok(Sym::from_api(name, self.ctx.i()).await), Ok(name) => Ok(Sym::from_api(name, &i()).await),
}) })
.await .await
} }
@@ -213,9 +209,9 @@ impl ConstCtx {
} }
} }
pub(crate) async fn get_const(id: api::ParsedConstId, ctx: SysCtx) -> GExpr { pub(crate) async fn get_const(id: api::ParsedConstId) -> GExpr {
let ent = ctx.get_or_default::<ParsedConstCtxEntry>(); let cb = (ctx().get_or_default::<ParsedConstCtxEntry>().consts.get(id.0))
let rec = ent.consts.get(id.0).expect("Bad ID or double read of parsed const"); .expect("Bad ID or double read of parsed const")
let ctx = ConstCtx { constid: id, ctx: ctx.clone() }; .remove();
rec.remove()(ctx).await cb(ConstCtx { constid: id }).await
} }
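`ConstCtx` now resolves names through the ambient context rather than a stored `SysCtx`. A sketch of consuming the `names` stream from inside a parsed-constant callback, assuming the signature above; `futures::StreamExt` supplies `next`, and the stream is boxed because its concrete type may not be `Unpin`:

async fn resolve_one(cctx: &ConstCtx, name: &Sym) -> OrcRes<Sym> {
  use futures::StreamExt;
  let mut results = Box::pin(cctx.names([name]));
  results.next().await.expect("one name was requested, so one result is expected")
}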

View File

@@ -9,59 +9,57 @@ use orchid_base::name::{NameLike, VPath};
use orchid_base::reqnot::Requester; use orchid_base::reqnot::Requester;
use crate::api; use crate::api;
use crate::system::{SysCtx, SysCtxEntry, WeakSysCtx}; use crate::context::{SysCtxEntry, ctx, i};
#[derive(Debug)]
pub struct ReflMemData { pub struct ReflMemData {
// None for inferred steps // None for inferred steps
public: OnceCell<bool>, public: OnceCell<bool>,
kind: ReflMemKind, kind: ReflMemKind,
} }
#[derive(Clone)] #[derive(Clone, Debug)]
pub struct ReflMem(Rc<ReflMemData>); pub struct ReflMem(Rc<ReflMemData>);
impl ReflMem { impl ReflMem {
pub fn kind(&self) -> ReflMemKind { self.0.kind.clone() } pub fn kind(&self) -> ReflMemKind { self.0.kind.clone() }
} }
#[derive(Clone)] #[derive(Clone, Debug)]
pub enum ReflMemKind { pub enum ReflMemKind {
Const, Const,
Mod(ReflMod), Mod(ReflMod),
} }
#[derive(Debug)]
pub struct ReflModData { pub struct ReflModData {
inferred: Mutex<bool>, inferred: Mutex<bool>,
path: VPath, path: VPath,
ctx: WeakSysCtx,
members: MemoMap<Tok<String>, ReflMem>, members: MemoMap<Tok<String>, ReflMem>,
} }
#[derive(Clone)] #[derive(Clone, Debug)]
pub struct ReflMod(Rc<ReflModData>); pub struct ReflMod(Rc<ReflModData>);
impl ReflMod { impl ReflMod {
fn ctx(&self) -> SysCtx {
self.0.ctx.upgrade().expect("ReflectedModule accessed after context drop")
}
pub fn path(&self) -> &[Tok<String>] { &self.0.path[..] } pub fn path(&self) -> &[Tok<String>] { &self.0.path[..] }
pub fn is_root(&self) -> bool { self.0.path.is_empty() } pub fn is_root(&self) -> bool { self.0.path.is_empty() }
async fn try_populate(&self) -> Result<(), api::LsModuleError> { async fn try_populate(&self) -> Result<(), api::LsModuleError> {
let ctx = self.ctx(); let path_tok = i().i(&self.0.path[..]).await;
let path_tok = ctx.i().i(&self.0.path[..]).await; let reply = match ctx().reqnot().request(api::LsModule(ctx().sys_id(), path_tok.to_api())).await
let reply = match ctx.reqnot().request(api::LsModule(ctx.sys_id(), path_tok.to_api())).await { {
Err(api::LsModuleError::TreeUnavailable) => Err(api::LsModuleError::TreeUnavailable) =>
panic!("Reflected tree accessed outside an interpreter call. This extension is faulty."), panic!("Reflected tree accessed outside an interpreter call. This extension is faulty."),
Err(err) => return Err(err), Err(err) => return Err(err),
Ok(details) => details, Ok(details) => details,
}; };
for (k, v) in reply.members { for (k, v) in reply.members {
let k = ctx.i().ex(k).await; let k = i().ex(k).await;
let mem = match self.0.members.get(&k) { let mem = match self.0.members.get(&k) {
Some(mem) => mem, Some(mem) => mem,
None => { None => {
let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym(ctx.i()).await; let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym(&i()).await;
let kind = match v.kind { let kind = match v.kind {
api::MemberInfoKind::Constant => ReflMemKind::Const, api::MemberInfoKind::Constant => ReflMemKind::Const,
api::MemberInfoKind::Module => api::MemberInfoKind::Module =>
ReflMemKind::Mod(default_module(&ctx, VPath::new(path.segs()))), ReflMemKind::Mod(default_module(VPath::new(path.segs()))),
}; };
self.0.members.get_or_insert(&k, || default_member(self.is_root(), kind)) self.0.members.get_or_insert(&k, || default_member(self.is_root(), kind))
}, },
@@ -89,7 +87,6 @@ impl ReflMod {
self.0.members.get(key).cloned() self.0.members.get(key).cloned()
} }
pub async fn get_by_path(&self, path: &[Tok<String>]) -> Result<ReflMem, InvalidPathError> { pub async fn get_by_path(&self, path: &[Tok<String>]) -> Result<ReflMem, InvalidPathError> {
let ctx = self.ctx();
let (next, tail) = path.split_first().expect("Attempted to walk by empty path"); let (next, tail) = path.split_first().expect("Attempted to walk by empty path");
let inferred_g = self.0.inferred.lock().await; let inferred_g = self.0.inferred.lock().await;
if let Some(next) = self.0.members.get(next) { if let Some(next) = self.0.members.get(next) {
@@ -105,7 +102,7 @@ impl ReflMod {
if !*inferred_g { if !*inferred_g {
return Err(InvalidPathError { keep_ancestry: true }); return Err(InvalidPathError { keep_ancestry: true });
} }
let candidate = default_module(&ctx, self.0.path.clone().suffix([next.clone()])); let candidate = default_module(self.0.path.clone().suffix([next.clone()]));
if tail.is_empty() { if tail.is_empty() {
return match candidate.try_populate().await { return match candidate.try_populate().await {
Ok(()) => { Ok(()) => {
@@ -116,8 +113,8 @@ impl ReflMod {
Err(api::LsModuleError::InvalidPath) => Err(InvalidPathError { keep_ancestry: false }), Err(api::LsModuleError::InvalidPath) => Err(InvalidPathError { keep_ancestry: false }),
Err(api::LsModuleError::IsConstant) => { Err(api::LsModuleError::IsConstant) => {
let const_mem = default_member(self.is_root(), ReflMemKind::Const); let const_mem = default_member(self.is_root(), ReflMemKind::Const);
self.0.members.insert(next.clone(), const_mem); self.0.members.insert(next.clone(), const_mem.clone());
Err(InvalidPathError { keep_ancestry: true }) Ok(const_mem)
}, },
Err(api::LsModuleError::TreeUnavailable) => unreachable!(), Err(api::LsModuleError::TreeUnavailable) => unreachable!(),
}; };
@@ -133,20 +130,17 @@ impl ReflMod {
} }
} }
#[derive(Clone)]
struct ReflRoot(ReflMod); struct ReflRoot(ReflMod);
impl SysCtxEntry for ReflRoot {} impl SysCtxEntry for ReflRoot {}
#[derive(Clone, Debug)]
pub struct InvalidPathError { pub struct InvalidPathError {
keep_ancestry: bool, keep_ancestry: bool,
} }
fn default_module(ctx: &SysCtx, path: VPath) -> ReflMod { fn default_module(path: VPath) -> ReflMod {
ReflMod(Rc::new(ReflModData { ReflMod(Rc::new(ReflModData { inferred: Mutex::new(true), path, members: MemoMap::new() }))
ctx: ctx.downgrade(),
inferred: Mutex::new(true),
path,
members: MemoMap::new(),
}))
} }
fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem { fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
@@ -156,8 +150,8 @@ fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
})) }))
} }
fn get_root(ctx: &SysCtx) -> &ReflRoot { fn get_root() -> ReflRoot {
ctx.get_or_insert(|| ReflRoot(default_module(ctx, VPath::new([])))) ctx().get_or_insert(|| ReflRoot(default_module(VPath::new([])))).clone()
} }
pub fn refl(ctx: &SysCtx) -> ReflMod { get_root(ctx).0.clone() } pub fn refl() -> ReflMod { get_root().0.clone() }
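A short usage sketch for the context-free reflection API above. The path `std::number` is illustrative, not something this PR defines; the point is that no `SysCtx` is threaded any more.

```rust
// Hypothetical lookup through the reflected tree; interner and host link are
// reached internally via i() / ctx().
async fn find_number_module() {
    let segs = [i().i("std").await, i().i("number").await];
    match refl().get_by_path(&segs).await {
        Ok(mem) => match mem.kind() {
            ReflMemKind::Const => { /* a constant lives at this path */ },
            ReflMemKind::Mod(_sub) => { /* keep walking from `_sub` if needed */ },
        },
        Err(_invalid_path) => { /* the host reported no such member */ },
    }
}
```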

View File

@@ -1,22 +1,18 @@
use std::any::{Any, TypeId, type_name}; use std::any::{Any, TypeId};
use std::fmt;
use std::future::Future; use std::future::Future;
use std::num::NonZero; use std::num::NonZero;
use std::pin::Pin; use std::pin::Pin;
use std::rc::{Rc, Weak};
use futures::FutureExt;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use memo_map::MemoMap; use orchid_api_traits::{Coding, Decode, Encode, Request};
use orchid_api_traits::{Coding, Decode};
use orchid_base::boxed_iter::BoxedIter; use orchid_base::boxed_iter::BoxedIter;
use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner;
use orchid_base::logging::Logger;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::{Receipt, ReqNot}; use orchid_base::reqnot::{Receipt, Requester};
use crate::api; use crate::api;
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TypAtom, get_info}; use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TAtom, get_info};
use crate::context::ctx;
use crate::coroutine_exec::Replier; use crate::coroutine_exec::Replier;
use crate::entrypoint::ExtReq; use crate::entrypoint::ExtReq;
use crate::func_atom::{Fun, Lambda}; use crate::func_atom::{Fun, Lambda};
@@ -32,7 +28,7 @@ pub trait SystemCard: Default + Send + Sync + 'static {
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>; fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>;
} }
pub trait DynSystemCard: Send + Sync + 'static { pub trait DynSystemCard: Send + Sync + Any + 'static {
fn name(&self) -> &'static str; fn name(&self) -> &'static str;
/// Atoms explicitly defined by the system card. Do not rely on this for /// Atoms explicitly defined by the system card. Do not rely on this for
/// querying atoms as it doesn't include the general atom types /// querying atoms as it doesn't include the general atom types
@@ -84,16 +80,16 @@ impl<T: SystemCard> DynSystemCard for T {
/// System as defined by author /// System as defined by author
pub trait System: Send + Sync + SystemCard + 'static { pub trait System: Send + Sync + SystemCard + 'static {
fn prelude(i: &Interner) -> impl Future<Output = Vec<Sym>>; fn prelude() -> impl Future<Output = Vec<Sym>>;
fn env() -> Vec<GenMember>; fn env() -> impl Future<Output = Vec<GenMember>>;
fn lexers() -> Vec<LexerObj>; fn lexers() -> Vec<LexerObj>;
fn parsers() -> Vec<ParserObj>; fn parsers() -> Vec<ParserObj>;
fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>; fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>;
} }
pub trait DynSystem: Send + Sync + DynSystemCard + 'static { pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>>; fn dyn_prelude(&self) -> LocalBoxFuture<'_, Vec<Sym>>;
fn dyn_env(&'_ self) -> Vec<GenMember>; fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>>;
fn dyn_lexers(&self) -> Vec<LexerObj>; fn dyn_lexers(&self) -> Vec<LexerObj>;
fn dyn_parsers(&self) -> Vec<ParserObj>; fn dyn_parsers(&self) -> Vec<ParserObj>;
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>; fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>;
@@ -101,10 +97,8 @@ pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
} }
impl<T: System> DynSystem for T { impl<T: System> DynSystem for T {
fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>> { fn dyn_prelude(&self) -> LocalBoxFuture<'_, Vec<Sym>> { Box::pin(Self::prelude()) }
Box::pin(Self::prelude(i)) fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>> { Self::env().boxed_local() }
}
fn dyn_env(&'_ self) -> Vec<GenMember> { Self::env() }
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() } fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() } fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> { fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> {
@@ -115,10 +109,10 @@ impl<T: System> DynSystem for T {
fn card(&self) -> &dyn DynSystemCard { self } fn card(&self) -> &dyn DynSystemCard { self }
} }
pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TypAtom<A>, ForeignAtom> pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TAtom<A>, ForeignAtom>
where A: AtomicFeatures { where A: AtomicFeatures {
let mut data = &foreign.atom.data.0[..]; let mut data = &foreign.atom.data.0[..];
let ctx = foreign.ctx().clone(); let ctx = ctx();
let value = AtomTypeId::decode(Pin::new(&mut data)).await; let value = AtomTypeId::decode(Pin::new(&mut data)).await;
let own_inst = ctx.get::<CtedObj>().inst(); let own_inst = ctx.get::<CtedObj>().inst();
let owner = if *ctx.get::<api::SysId>() == foreign.atom.owner { let owner = if *ctx.get::<api::SysId>() == foreign.atom.owner {
@@ -128,74 +122,30 @@ where A: AtomicFeatures {
.ok_or_else(|| foreign.clone())? .ok_or_else(|| foreign.clone())?
.get_card() .get_card()
}; };
if owner.atoms().flatten().all(|dynfo| dynfo.tid() != TypeId::of::<A>()) {
return Err(foreign);
}
let (typ_id, dynfo) = get_info::<A>(owner); let (typ_id, dynfo) = get_info::<A>(owner);
if value != typ_id { if value != typ_id {
return Err(foreign); return Err(foreign);
} }
let val = dynfo.decode(AtomCtx(data, foreign.atom.drop, ctx)).await; let val = dynfo.decode(AtomCtx(data, foreign.atom.drop)).await;
let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type"); let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type");
Ok(TypAtom { value, untyped: foreign }) Ok(TAtom { value, untyped: foreign })
} }
#[derive(Clone)] pub async fn dep_req<Sys: SystemCard, Req: Request + Into<Sys::Req>>(req: Req) -> Req::Response {
pub struct WeakSysCtx(Weak<MemoMap<TypeId, Box<dyn Any>>>); let ctx = ctx();
impl WeakSysCtx { let mut msg = Vec::new();
pub fn upgrade(&self) -> Option<SysCtx> { Some(SysCtx(self.0.upgrade()?)) } req.into().encode(std::pin::pin!(&mut msg)).await;
let own_inst = ctx.get::<CtedObj>().inst();
let owner = if own_inst.card().type_id() == TypeId::of::<Sys>() {
ctx.sys_id()
} else {
(ctx.get::<CtedObj>().deps().find(|s| s.get_card().type_id() == TypeId::of::<Sys>()))
.expect("System not in dependency array")
.id()
};
let reply = ctx.reqnot().request(api::SysFwd(owner, msg)).await;
Req::Response::decode(std::pin::pin!(&reply[..])).await
} }
#[derive(Clone)]
pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
impl SysCtx {
pub fn new(
id: api::SysId,
i: Interner,
reqnot: ReqNot<api::ExtMsgSet>,
spawner: Spawner,
logger: Logger,
cted: CtedObj,
) -> Self {
let this = Self(Rc::new(MemoMap::new()));
this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted);
this
}
pub fn downgrade(&self) -> WeakSysCtx { WeakSysCtx(Rc::downgrade(&self.0)) }
pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self {
assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists");
self
}
pub fn get_or_insert<T: SysCtxEntry>(&self, f: impl FnOnce() -> T) -> &T {
(self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref())
.expect("Keyed by TypeId")
}
pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { self.get_or_insert(T::default) }
pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> {
Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId"))
}
pub fn get<T: SysCtxEntry>(&self) -> &T {
self.try_get().unwrap_or_else(|| panic!("Context {} missing", type_name::<T>()))
}
/// Shorthand to get the [Interner] instance
pub fn i(&self) -> &Interner { self.get::<Interner>() }
/// Shorthand to get the messaging link
pub fn reqnot(&self) -> &ReqNot<api::ExtMsgSet> { self.get::<ReqNot<api::ExtMsgSet>>() }
/// Shorthand to get the system ID
pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() }
/// Shorthand to get the task spawner callback
pub fn spawner(&self) -> &Spawner { self.get::<Spawner>() }
/// Shorthand to get the logger
pub fn logger(&self) -> &Logger { self.get::<Logger>() }
/// Shorthand to get the constructed system object
pub fn cted(&self) -> &CtedObj { self.get::<CtedObj>() }
}
impl fmt::Debug for SysCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "SysCtx({:?})", self.sys_id())
}
}
pub trait SysCtxEntry: 'static + Sized {}
impl SysCtxEntry for api::SysId {}
impl SysCtxEntry for ReqNot<api::ExtMsgSet> {}
impl SysCtxEntry for Spawner {}
impl SysCtxEntry for CtedObj {}
impl SysCtxEntry for Logger {}
impl SysCtxEntry for Interner {}
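A hypothetical use of the new `dep_req` helper above: a typed request is forwarded to a dependency system located through the task-local context. `OtherSys` and `Ping` are assumed names for illustration; the PR only defines `dep_req` itself.

```rust
// Assumptions (not from the PR): `OtherSys: SystemCard`,
// `Ping: Request<Response = u64>` and `Ping: Into<<OtherSys as SystemCard>::Req>`.
async fn ping_dependency() -> u64 {
    // Encodes the request, finds the owning system id among the declared
    // dependencies, and forwards it as an api::SysFwd message.
    dep_req::<OtherSys, Ping>(Ping { seq: 1 }).await
}
```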

View File

@@ -62,6 +62,8 @@ pub trait SystemCtor: Send + Sync + 'static {
type Instance: System; type Instance: System;
const NAME: &'static str; const NAME: &'static str;
const VERSION: f64; const VERSION: f64;
/// Create a system instance. When this function is called, a context object
/// isn't yet available
fn inst(deps: <Self::Deps as DepDef>::Sat) -> Self::Instance; fn inst(deps: <Self::Deps as DepDef>::Sat) -> Self::Instance;
} }

View File

@@ -14,19 +14,19 @@ use substack::Substack;
use trait_set::trait_set; use trait_set::trait_set;
use crate::api; use crate::api;
use crate::context::i;
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::entrypoint::MemberRecord; use crate::entrypoint::MemberRecord;
use crate::expr::{BorrowedExprStore, Expr, ExprHandle}; use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
use crate::func_atom::{ExprFunc, Fun}; use crate::func_atom::{ExprFunc, Fun};
use crate::gen_expr::{GExpr, sym_ref}; use crate::gen_expr::{GExpr, sym_ref};
use crate::system::SysCtx;
pub type GenTokTree = TokTree<Expr, GExpr>; pub type GenTokTree = TokTree<Expr, GExpr>;
pub type GenTok = Token<Expr, GExpr>; pub type GenTok = Token<Expr, GExpr>;
impl TokenVariant<api::Expression> for GExpr { impl TokenVariant<api::Expression> for GExpr {
type FromApiCtx<'a> = (); type FromApiCtx<'a> = ();
type ToApiCtx<'a> = SysCtx; type ToApiCtx<'a> = ();
async fn from_api( async fn from_api(
_: &api::Expression, _: &api::Expression,
_: &mut Self::FromApiCtx<'_>, _: &mut Self::FromApiCtx<'_>,
@@ -35,33 +35,31 @@ impl TokenVariant<api::Expression> for GExpr {
) -> Self { ) -> Self {
panic!("Received new expression from host") panic!("Received new expression from host")
} }
async fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> api::Expression { async fn into_api(self, _: &mut Self::ToApiCtx<'_>) -> api::Expression { self.serialize().await }
self.api_return(ctx.clone()).await
}
} }
impl TokenVariant<api::ExprTicket> for Expr { impl TokenVariant<api::ExprTicket> for Expr {
type FromApiCtx<'a> = (SysCtx, &'a BorrowedExprStore); type FromApiCtx<'a> = &'a BorrowedExprStore;
async fn from_api( async fn from_api(
api: &api::ExprTicket, api: &api::ExprTicket,
(ctx, exprs): &mut Self::FromApiCtx<'_>, exprs: &mut Self::FromApiCtx<'_>,
_: SrcRange, _: SrcRange,
_: &Interner, _: &Interner,
) -> Self { ) -> Self {
// SAFETY: receiving trees from sublexers implies borrowing // SAFETY: receiving trees from sublexers implies borrowing
Expr::from_handle(ExprHandle::borrowed(ctx.clone(), *api, exprs)) Expr::from_handle(ExprHandle::borrowed(*api, exprs))
} }
type ToApiCtx<'a> = (); type ToApiCtx<'a> = ();
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::ExprTicket { self.handle().tk } async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::ExprTicket { self.handle().ticket() }
} }
pub async fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_expr().await) } pub async fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_gen().await) }
pub async fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) } pub async fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) }
pub fn lazy( pub fn lazy(
public: bool, public: bool,
name: &str, name: &str,
cb: impl AsyncFnOnce(Sym, SysCtx) -> MemKind + Clone + 'static, cb: impl AsyncFnOnce(Sym) -> MemKind + Clone + 'static,
) -> Vec<GenMember> { ) -> Vec<GenMember> {
vec![GenMember { vec![GenMember {
name: name.to_string(), name: name.to_string(),
@@ -71,7 +69,7 @@ pub fn lazy(
}] }]
} }
pub fn cnst(public: bool, name: &str, value: impl ToExpr + Clone + 'static) -> Vec<GenMember> { pub fn cnst(public: bool, name: &str, value: impl ToExpr + Clone + 'static) -> Vec<GenMember> {
lazy(public, name, async |_, _| MemKind::Const(value.to_expr().await)) lazy(public, name, async |_| MemKind::Const(value.to_gen().await))
} }
pub fn module( pub fn module(
public: bool, public: bool,
@@ -86,8 +84,8 @@ pub fn root_mod(name: &str, mems: impl IntoIterator<Item = Vec<GenMember>>) -> (
(name.to_string(), kind) (name.to_string(), kind)
} }
pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> { pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> {
let fac = LazyMemberFactory::new(async move |sym, ctx| { let fac = LazyMemberFactory::new(async move |sym| {
MemKind::Const(Fun::new(sym, ctx, xf).await.to_expr().await) MemKind::Const(Fun::new(sym, xf).await.to_gen().await)
}); });
vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }] vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }]
} }
@@ -149,14 +147,14 @@ pub fn merge_trivial(trees: impl IntoIterator<Item = Vec<GenMember>>) -> Vec<Gen
trait_set! { trait_set! {
trait LazyMemberCallback = trait LazyMemberCallback =
FnOnce(Sym, SysCtx) -> LocalBoxFuture<'static, MemKind> + DynClone FnOnce(Sym) -> LocalBoxFuture<'static, MemKind> + DynClone
} }
pub struct LazyMemberFactory(Box<dyn LazyMemberCallback>); pub struct LazyMemberFactory(Box<dyn LazyMemberCallback>);
impl LazyMemberFactory { impl LazyMemberFactory {
pub fn new(cb: impl AsyncFnOnce(Sym, SysCtx) -> MemKind + Clone + 'static) -> Self { pub fn new(cb: impl AsyncFnOnce(Sym) -> MemKind + Clone + 'static) -> Self {
Self(Box::new(|s, ctx| cb(s, ctx).boxed_local())) Self(Box::new(|s| cb(s).boxed_local()))
} }
pub async fn build(self, path: Sym, ctx: SysCtx) -> MemKind { (self.0)(path, ctx).await } pub async fn build(self, path: Sym) -> MemKind { (self.0)(path).await }
} }
impl Clone for LazyMemberFactory { impl Clone for LazyMemberFactory {
fn clone(&self) -> Self { Self(clone_box(&*self.0)) } fn clone(&self) -> Self { Self(clone_box(&*self.0)) }
@@ -169,11 +167,10 @@ pub struct GenMember {
pub comments: Vec<String>, pub comments: Vec<String>,
} }
impl GenMember { impl GenMember {
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Member { pub async fn into_api(self, tia_cx: &mut impl TreeIntoApiCtx) -> api::Member {
let name = ctx.sys().i().i::<String>(&self.name).await; let name = i().i::<String>(&self.name).await;
let kind = self.kind.into_api(&mut ctx.push_path(name.clone())).await; let kind = self.kind.into_api(&mut tia_cx.push_path(name.clone())).await;
let comments = let comments = join_all(self.comments.iter().map(async |cmt| i().i(cmt).await.to_api())).await;
join_all(self.comments.iter().map(|cmt| async { ctx.sys().i().i(cmt).await.to_api() })).await;
api::Member { kind, name: name.to_api(), comments, exported: self.public } api::Member { kind, name: name.to_api(), comments, exported: self.public }
} }
} }
@@ -187,7 +184,7 @@ impl MemKind {
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind { pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
match self { match self {
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)), Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)),
Self::Const(c) => api::MemberKind::Const(c.api_return(ctx.sys()).await), Self::Const(c) => api::MemberKind::Const(c.serialize().await),
Self::Mod { members } => api::MemberKind::Module(api::Module { Self::Mod { members } => api::MemberKind::Module(api::Module {
members: stream(async |mut cx| { members: stream(async |mut cx| {
for m in members { for m in members {
@@ -203,24 +200,20 @@ impl MemKind {
} }
pub trait TreeIntoApiCtx { pub trait TreeIntoApiCtx {
fn sys(&self) -> SysCtx;
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId; fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId;
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx; fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx;
} }
pub struct TreeIntoApiCtxImpl<'a, 'b> { pub struct TreeIntoApiCtxImpl<'a, 'b> {
pub sys: SysCtx,
pub basepath: &'a [Tok<String>], pub basepath: &'a [Tok<String>],
pub path: Substack<'a, Tok<String>>, pub path: Substack<'a, Tok<String>>,
pub lazy_members: &'b mut HashMap<api::TreeId, MemberRecord>, pub lazy_members: &'b mut HashMap<api::TreeId, MemberRecord>,
} }
impl TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_> { impl TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_> {
fn sys(&self) -> SysCtx { self.sys.clone() }
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx { fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx {
TreeIntoApiCtxImpl { TreeIntoApiCtxImpl {
lazy_members: self.lazy_members, lazy_members: self.lazy_members,
sys: self.sys.clone(),
basepath: self.basepath, basepath: self.basepath,
path: self.path.push(seg), path: self.path.push(seg),
} }
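A small, hypothetical environment built with the ctx-free helpers from this file; the member name and body are illustrative only.

```rust
// Sketch: the lazy callback now takes just the member's Sym; anything else
// (interner, host link) is reached through the task-local accessors.
fn example_env() -> Vec<GenMember> {
    lazy(true, "self_ref", async |sym: Sym| {
        // Illustrative body: a constant that simply refers to its own path.
        MemKind::Const(sym_ref(sym))
    })
}
```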

View File

@@ -7,12 +7,12 @@ edition = "2024"
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" } async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-lock = "3.4.1"
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
async-process = "2.4.0" async-process = "2.4.0"
bound = "0.6.0" bound = "0.6.0"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
futures = { version = "0.3.31", features = ["std"], default-features = false } futures = { version = "0.3.31", features = ["std"], default-features = false }
futures-locks = "0.7.1"
hashbrown = "0.16.0" hashbrown = "0.16.0"
itertools = "0.14.0" itertools = "0.14.0"
lazy_static = "1.5.0" lazy_static = "1.5.0"

View File

@@ -1,7 +1,7 @@
use std::fmt; use std::fmt;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use async_lock::OnceCell; use async_once_cell::OnceCell;
use derive_destructure::destructure; use derive_destructure::destructure;
use orchid_base::format::{FmtCtx, FmtUnit, Format, take_first_fmt}; use orchid_base::format::{FmtCtx, FmtUnit, Format, take_first_fmt};
use orchid_base::location::Pos; use orchid_base::location::Pos;
@@ -10,7 +10,7 @@ use orchid_base::tree::AtomRepr;
use crate::api; use crate::api;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder}; use crate::expr::{Expr, PathSetBuilder};
use crate::extension::Extension; use crate::extension::Extension;
use crate::system::System; use crate::system::System;
@@ -58,15 +58,15 @@ impl AtomHand {
#[must_use] #[must_use]
pub async fn call(self, arg: Expr) -> Expr { pub async fn call(self, arg: Expr) -> Expr {
let owner_sys = self.0.owner.clone(); let owner_sys = self.0.owner.clone();
let ctx = owner_sys.ctx();
let reqnot = owner_sys.reqnot(); let reqnot = owner_sys.reqnot();
owner_sys.ext().exprs().give_expr(arg.clone()); ctx.exprs.give_expr(arg.clone());
let ret = match Rc::try_unwrap(self.0) { let ret = match Rc::try_unwrap(self.0) {
Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await, Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await,
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await, Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
}; };
let mut parse_ctx = ExprParseCtx { ctx: owner_sys.ctx(), exprs: owner_sys.ext().exprs() }; let val = Expr::from_api(&ret, PathSetBuilder::new(), ctx.clone()).await;
let val = Expr::from_api(&ret, PathSetBuilder::new(), &mut parse_ctx).await; ctx.exprs.take_expr(arg.id());
owner_sys.ext().exprs().take_expr(arg.id());
val val
} }
#[must_use] #[must_use]
@@ -85,7 +85,7 @@ impl AtomHand {
} }
impl Format for AtomHand { impl Format for AtomHand {
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
(self.0.display.get_or_init(|| async { (self.0.display.get_or_init(async {
FmtUnit::from_api(&self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())).await) FmtUnit::from_api(&self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())).await)
})) }))
.await .await
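The display cache above moves from `async_lock::OnceCell` to `async_once_cell::OnceCell`, whose `get_or_init` takes a future directly instead of a closure. A standalone illustration, not PR code:

```rust
use async_once_cell::OnceCell;

async fn demo() {
    let cell: OnceCell<u32> = OnceCell::new();
    // A future, not `|| async { .. }` as with async_lock's cell.
    let v = cell.get_or_init(async { 42 }).await;
    assert_eq!(*v, 42);
}
```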

View File

@@ -3,7 +3,7 @@ use std::num::{NonZero, NonZeroU16};
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use std::{fmt, ops}; use std::{fmt, ops};
use async_lock::RwLock; use futures_locks::RwLock;
use hashbrown::HashMap; use hashbrown::HashMap;
use orchid_base::builtin::Spawner; use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner; use orchid_base::interner::Interner;
@@ -18,7 +18,7 @@ pub struct CtxData {
pub spawn: Spawner, pub spawn: Spawner,
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>, pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
pub system_id: RefCell<NonZeroU16>, pub system_id: RefCell<NonZeroU16>,
pub common_exprs: ExprStore, pub exprs: ExprStore,
pub root: RwLock<WeakRoot>, pub root: RwLock<WeakRoot>,
} }
#[derive(Clone)] #[derive(Clone)]
@@ -43,7 +43,7 @@ impl Ctx {
i: Interner::default(), i: Interner::default(),
systems: RwLock::default(), systems: RwLock::default(),
system_id: RefCell::new(NonZero::new(1).unwrap()), system_id: RefCell::new(NonZero::new(1).unwrap()),
common_exprs: ExprStore::default(), exprs: ExprStore::default(),
root: RwLock::default(), root: RwLock::default(),
})) }))
} }

View File

@@ -48,13 +48,15 @@ pub async fn absolute_path(
) -> Result<VName, AbsPathError> { ) -> Result<VName, AbsPathError> {
let i_self = i.i("self").await; let i_self = i.i("self").await;
let i_super = i.i("super").await; let i_super = i.i("super").await;
let relative = rel.first().is_some_and(|s| *s != i_self && *s != i_super); let mut relative = false;
if let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h != i_self) { if let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_self) {
rel = tail; rel = tail;
relative = true;
} else { } else {
while let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_super) { while let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_super) {
cwd = cwd.split_last().ok_or(AbsPathError::TooManySupers)?.1; cwd = cwd.split_last().ok_or(AbsPathError::TooManySupers)?.1;
rel = tail; rel = tail;
relative = true;
} }
} }
if relative { VName::new(cwd.iter().chain(rel).cloned()) } else { VName::new(rel.to_vec()) } if relative { VName::new(cwd.iter().chain(rel).cloned()) } else { VName::new(rel.to_vec()) }
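The fix above makes a path relative only when it actually starts with `self` or `super`. Roughly, the intended resolutions are as follows (illustrative, not a test from the PR):

```rust
// With cwd = foo::bar
//   self::baz         -> foo::bar::baz   (leading `self` stripped, rest joined to cwd)
//   super::baz        -> foo::baz        (each `super` drops one cwd segment)
//   super::super::baz -> baz             (two segments dropped)
//   qux::baz          -> qux::baz        (no self/super prefix: taken as absolute)
//   too many `super`s -> Err(AbsPathError::TooManySupers)
```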

View File

@@ -1,8 +1,8 @@
use std::mem; use std::mem;
use async_lock::RwLockWriteGuard;
use bound::Bound; use bound::Bound;
use futures::FutureExt; use futures::FutureExt;
use futures_locks::{RwLockWriteGuard, TryLockError};
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtxImpl, Format, take_first}; use orchid_base::format::{FmtCtxImpl, Format, take_first};
use orchid_base::location::Pos; use orchid_base::location::Pos;
@@ -12,7 +12,7 @@ use crate::ctx::Ctx;
use crate::expr::{Expr, ExprKind, PathSet, Step}; use crate::expr::{Expr, ExprKind, PathSet, Step};
use crate::tree::Root; use crate::tree::Root;
type ExprGuard = Bound<RwLockWriteGuard<'static, ExprKind>, Expr>; type ExprGuard = Bound<RwLockWriteGuard<ExprKind>, Expr>;
/// The stack operation associated with a transform /// The stack operation associated with a transform
enum StackOp { enum StackOp {
@@ -76,13 +76,13 @@ impl ExecCtx {
#[must_use] #[must_use]
pub async fn unpack_ident(&self, ex: &Expr) -> Expr { pub async fn unpack_ident(&self, ex: &Expr) -> Expr {
match ex.kind().try_write().as_deref_mut() { match ex.kind().try_write().as_deref_mut() {
Some(ExprKind::Identity(ex)) => { Ok(ExprKind::Identity(ex)) => {
let val = self.unpack_ident(ex).boxed_local().await; let val = self.unpack_ident(ex).boxed_local().await;
*ex = val.clone(); *ex = val.clone();
val val
}, },
Some(_) => ex.clone(), Ok(_) => ex.clone(),
None => panic!("Cycle encountered!"), Err(TryLockError) => panic!("Cycle encountered!"),
} }
} }
pub async fn execute(&mut self) { pub async fn execute(&mut self) {
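For reference, `futures_locks::RwLock::try_write` yields `Result<_, TryLockError>` rather than `Option`, which is why the match arms above change shape. A minimal standalone sketch:

```rust
use futures_locks::{RwLock, TryLockError};

fn bump(counter: &RwLock<u32>) {
    match counter.try_write() {
        Ok(mut guard) => *guard += 1,
        Err(TryLockError) => { /* another guard is held right now */ },
    }
}
```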

View File

@@ -4,8 +4,8 @@ use std::num::NonZeroU64;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use std::{fmt, mem}; use std::{fmt, mem};
use async_lock::RwLock;
use futures::FutureExt; use futures::FutureExt;
use futures_locks::RwLock;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
@@ -21,12 +21,6 @@ use crate::atom::AtomHand;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::expr_store::ExprStore; use crate::expr_store::ExprStore;
#[derive(Clone)]
pub struct ExprParseCtx<'a> {
pub ctx: &'a Ctx,
pub exprs: &'a ExprStore,
}
#[derive(Debug)] #[derive(Debug)]
pub struct ExprData { pub struct ExprData {
pos: Pos, pos: Pos,
@@ -41,9 +35,9 @@ impl Expr {
pub async fn try_into_owned_atom(self) -> Result<AtomHand, Self> { pub async fn try_into_owned_atom(self) -> Result<AtomHand, Self> {
match Rc::try_unwrap(self.0) { match Rc::try_unwrap(self.0) {
Err(e) => Err(Self(e)), Err(e) => Err(Self(e)),
Ok(data) => match data.kind.into_inner() { Ok(data) => match data.kind.try_unwrap().expect("This fields shouldn't be copied") {
ExprKind::Atom(a) => Ok(a), ExprKind::Atom(a) => Ok(a),
inner => Err(Self(Rc::new(ExprData { kind: inner.into(), pos: data.pos }))), inner => Err(Self(Rc::new(ExprData { kind: RwLock::new(inner), pos: data.pos }))),
}, },
} }
} }
@@ -61,42 +55,34 @@ impl Expr {
) )
} }
#[must_use] #[must_use]
pub async fn from_api( pub async fn from_api(api: &api::Expression, psb: PathSetBuilder<'_, u64>, ctx: Ctx) -> Self {
api: &api::Expression, let pos = Pos::from_api(&api.location, &ctx.i).await;
psb: PathSetBuilder<'_, u64>,
ctx: &mut ExprParseCtx<'_>,
) -> Self {
let pos = Pos::from_api(&api.location, &ctx.ctx.i).await;
let kind = match &api.kind { let kind = match &api.kind {
api::ExpressionKind::Arg(n) => { api::ExpressionKind::Arg(n) => {
assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda"); assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda");
ExprKind::Arg ExprKind::Arg
}, },
api::ExpressionKind::Bottom(bot) => api::ExpressionKind::Bottom(bot) => ExprKind::Bottom(OrcErrv::from_api(bot, &ctx.i).await),
ExprKind::Bottom(OrcErrv::from_api(bot, &ctx.ctx.i).await),
api::ExpressionKind::Call(f, x) => { api::ExpressionKind::Call(f, x) => {
let (lpsb, rpsb) = psb.split(); let (lpsb, rpsb) = psb.split();
ExprKind::Call( ExprKind::Call(
Expr::from_api(f, lpsb, ctx).boxed_local().await, Expr::from_api(f, lpsb, ctx.clone()).boxed_local().await,
Expr::from_api(x, rpsb, ctx).boxed_local().await, Expr::from_api(x, rpsb, ctx).boxed_local().await,
) )
}, },
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.ctx.i).await), api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.i).await),
api::ExpressionKind::Lambda(x, body) => { api::ExpressionKind::Lambda(x, body) => {
let lbuilder = psb.lambda(x); let lbuilder = psb.lambda(x);
let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await; let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await;
ExprKind::Lambda(lbuilder.collect(), body) ExprKind::Lambda(lbuilder.collect(), body)
}, },
api::ExpressionKind::NewAtom(a) => api::ExpressionKind::NewAtom(a) =>
ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.ctx.clone()).await), ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.clone()).await),
api::ExpressionKind::Slot { tk, by_value: false } => api::ExpressionKind::Slot(tk) => return ctx.exprs.take_expr(*tk).expect("Invalid slot"),
return ctx.exprs.get_expr(*tk).expect("Invalid slot"),
api::ExpressionKind::Slot { tk, by_value: true } =>
return ctx.exprs.take_expr(*tk).expect("Invalid slot"),
api::ExpressionKind::Seq(a, b) => { api::ExpressionKind::Seq(a, b) => {
let (apsb, bpsb) = psb.split(); let (apsb, bpsb) = psb.split();
ExprKind::Seq( ExprKind::Seq(
Expr::from_api(a, apsb, ctx).boxed_local().await, Expr::from_api(a, apsb, ctx.clone()).boxed_local().await,
Expr::from_api(b, bpsb, ctx).boxed_local().await, Expr::from_api(b, bpsb, ctx).boxed_local().await,
) )
}, },
@@ -169,8 +155,8 @@ async fn print_exprkind<'a>(
ExprKind::Bottom(e) if e.len() == 1 => format!("Bottom({e})").into(), ExprKind::Bottom(e) if e.len() == 1 => format!("Bottom({e})").into(),
ExprKind::Bottom(e) => format!("Bottom(\n\t{}\n)", indent(&e.to_string())).into(), ExprKind::Bottom(e) => format!("Bottom(\n\t{}\n)", indent(&e.to_string())).into(),
ExprKind::Call(f, x) => tl_cache!(Rc<Variants>: Rc::new(Variants::default() ExprKind::Call(f, x) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
.unbounded("{0} {1l}") .unbounded("{0b} {1l}")
.bounded("({0} {1b})"))) .bounded("({0b} {1})")))
.units([print_expr(f, c, visited).await, print_expr(x, c, visited).await]), .units([print_expr(f, c, visited).await, print_expr(x, c, visited).await]),
ExprKind::Identity(id) => ExprKind::Identity(id) =>
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{{{0}}}"))).units([print_expr( tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{{{0}}}"))).units([print_expr(
@@ -180,11 +166,11 @@ async fn print_exprkind<'a>(
.await]), .await]),
ExprKind::Const(c) => format!("{c}").into(), ExprKind::Const(c) => format!("{c}").into(),
ExprKind::Lambda(None, body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default() ExprKind::Lambda(None, body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
.unbounded("\\.{0l}") // .unbounded("\\.{0l}")
.bounded("(\\.{0b})"))) .bounded("(\\.{0b})")))
.units([print_expr(body, c, visited).await]), .units([print_expr(body, c, visited).await]),
ExprKind::Lambda(Some(path), body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default() ExprKind::Lambda(Some(path), body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
.unbounded("\\{0b}. {1l}") // .unbounded("\\{0b}. {1l}")
.bounded("(\\{0b}. {1b})"))) .bounded("(\\{0b}. {1b})")))
.units([format!("{path}").into(), print_expr(body, c, visited).await]), .units([format!("{path}").into(), print_expr(body, c, visited).await]),
ExprKind::Seq(l, r) => ExprKind::Seq(l, r) =>
@@ -361,14 +347,14 @@ impl TokenVariant<api::ExprTicket> for Expr {
pub struct ExprWillPanic; pub struct ExprWillPanic;
impl TokenVariant<api::Expression> for Expr { impl TokenVariant<api::Expression> for Expr {
type FromApiCtx<'a> = ExprParseCtx<'a>; type FromApiCtx<'a> = Ctx;
async fn from_api( async fn from_api(
api: &api::Expression, api: &api::Expression,
ctx: &mut Self::FromApiCtx<'_>, ctx: &mut Self::FromApiCtx<'_>,
_: SrcRange, _: SrcRange,
_: &Interner, _: &Interner,
) -> Self { ) -> Self {
Self::from_api(api, PathSetBuilder::new(), ctx).await Self::from_api(api, PathSetBuilder::new(), ctx.clone()).await
} }
type ToApiCtx<'a> = ExprWillPanic; type ToApiCtx<'a> = ExprWillPanic;
async fn into_api(self, ExprWillPanic: &mut Self::ToApiCtx<'_>) -> api::Expression { async fn into_api(self, ExprWillPanic: &mut Self::ToApiCtx<'_>) -> api::Expression {

View File

@@ -13,7 +13,6 @@ use crate::expr::Expr;
pub struct ExprStoreData { pub struct ExprStoreData {
exprs: RefCell<HashMap<api::ExprTicket, (u32, Expr)>>, exprs: RefCell<HashMap<api::ExprTicket, (u32, Expr)>>,
parent: Option<ExprStore>, parent: Option<ExprStore>,
tracking_parent: bool,
} }
#[derive(Clone, Default)] #[derive(Clone, Default)]
pub struct ExprStore(Rc<ExprStoreData>); pub struct ExprStore(Rc<ExprStoreData>);
@@ -25,16 +24,12 @@ impl ExprStore {
/// but operations on the parent can access the child exprs too until this /// but operations on the parent can access the child exprs too until this
/// store is dropped. /// store is dropped.
#[must_use] #[must_use]
pub fn derive(&self, tracking_parent: bool) -> Self { pub fn derive(&self) -> Self {
Self(Rc::new(ExprStoreData { Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) }))
exprs: RefCell::default(),
parent: Some(self.clone()),
tracking_parent,
}))
} }
pub fn give_expr(&self, expr: Expr) { pub fn give_expr(&self, expr: Expr) {
if self.0.tracking_parent { if let Some(parent) = self.0.parent.as_ref() {
self.0.parent.as_ref().unwrap().give_expr(expr.clone()); parent.give_expr(expr.clone())
} }
match self.0.exprs.borrow_mut().entry(expr.id()) { match self.0.exprs.borrow_mut().entry(expr.id()) {
Entry::Occupied(mut oe) => oe.get_mut().0 += 1, Entry::Occupied(mut oe) => oe.get_mut().0 += 1,
@@ -44,8 +39,8 @@ impl ExprStore {
} }
} }
pub fn take_expr(&self, ticket: api::ExprTicket) -> Option<Expr> { pub fn take_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
if self.0.tracking_parent { if let Some(parent) = self.0.parent.as_ref() {
self.0.parent.as_ref().unwrap().take_expr(ticket); parent.take_expr(ticket);
} }
match self.0.exprs.borrow_mut().entry(ticket) { match self.0.exprs.borrow_mut().entry(ticket) {
Entry::Vacant(_) => panic!("Attempted to double-take expression"), Entry::Vacant(_) => panic!("Attempted to double-take expression"),
@@ -79,14 +74,12 @@ impl Drop for ExprStore {
if 1 < Rc::strong_count(&self.0) { if 1 < Rc::strong_count(&self.0) {
return; return;
} }
if !self.0.tracking_parent { if let Some(parent) = self.0.parent.as_ref() {
return;
}
let parent = self.0.parent.as_ref().unwrap();
for (id, (count, _)) in self.0.exprs.borrow().iter() { for (id, (count, _)) in self.0.exprs.borrow().iter() {
for _ in 0..*count { for _ in 0..*count {
parent.take_expr(*id); parent.take_expr(*id);
} }
} }
} }
}
} }
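A sketch of the simplified ownership flow after dropping the `tracking_parent` flag: a derived store always mirrors its acquisitions into the parent and hands them back when dropped. The `expr` argument is assumed to exist; this is not PR code.

```rust
// Illustrates the intent of derive()/give_expr()/Drop above.
fn demo(expr: Expr) {
    let parent = ExprStore::default();
    {
        let child = parent.derive();     // no tracking flag any more
        child.give_expr(expr.clone());   // counted in the child and mirrored into `parent`
        // ...hand the expression's ticket out through the child store...
    } // `child` dropped here: its mirrored counts are taken back out of `parent`
}
```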

View File

@@ -28,7 +28,7 @@ use crate::api;
use crate::atom::AtomHand; use crate::atom::AtomHand;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::dealias::{ChildError, ChildErrorKind, walk}; use crate::dealias::{ChildError, ChildErrorKind, walk};
use crate::expr_store::ExprStore; use crate::expr::{Expr, PathSetBuilder};
use crate::system::SystemCtor; use crate::system::SystemCtor;
use crate::tree::MemberKind; use crate::tree::MemberKind;
@@ -46,7 +46,6 @@ pub struct ExtensionData {
systems: Vec<SystemCtor>, systems: Vec<SystemCtor>,
logger: Logger, logger: Logger,
next_pars: RefCell<NonZeroU64>, next_pars: RefCell<NonZeroU64>,
exprs: ExprStore,
exiting_snd: Sender<()>, exiting_snd: Sender<()>,
lex_recur: Mutex<HashMap<api::ParsId, Sender<ReqPair<api::SubLex>>>>, lex_recur: Mutex<HashMap<api::ParsId, Sender<ReqPair<api::SubLex>>>>,
} }
@@ -56,6 +55,7 @@ impl Drop for ExtensionData {
let mut exiting_snd = self.exiting_snd.clone(); let mut exiting_snd = self.exiting_snd.clone();
(self.ctx.spawn)(Box::pin(async move { (self.ctx.spawn)(Box::pin(async move {
reqnot.notify(api::HostExtNotif::Exit).await; reqnot.notify(api::HostExtNotif::Exit).await;
exiting_snd.send(()).await.unwrap() exiting_snd.send(()).await.unwrap()
})) }))
} }
@@ -90,7 +90,6 @@ impl Extension {
ExtensionData { ExtensionData {
name: init.name.clone(), name: init.name.clone(),
exiting_snd, exiting_snd,
exprs: ctx.common_exprs.derive(false),
ctx: ctx.clone(), ctx: ctx.clone(),
systems: (init.systems.iter().cloned()) systems: (init.systems.iter().cloned())
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) }) .map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
@@ -109,25 +108,15 @@ impl Extension {
} }
match notif { match notif {
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => { api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => {
let target = this.0.exprs.get_expr(acq.1).expect("Invalid ticket"); let target = this.0.ctx.exprs.get_expr(acq.1).expect("Invalid ticket");
this.0.exprs.give_expr(target) this.0.ctx.exprs.give_expr(target)
} }
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => { api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => {
if this.is_own_sys(rel.0).await { if this.is_own_sys(rel.0).await {
this.0.exprs.take_expr(rel.1); this.0.ctx.exprs.take_expr(rel.1);
} else { } else {
writeln!(this.reqnot().logger(), "Not our system {:?}", rel.0) writeln!(this.reqnot().logger(), "Not our system {:?}", rel.0)
} }
}
api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => {
if !this.is_own_sys(mov.dec).await {
writeln!(this.reqnot().logger(), "Not our system {:?}", mov.dec);
return;
}
let recp = this.ctx().system_inst(mov.inc).await.expect("invallid recipient sys id");
let expr = this.0.exprs.get_expr(mov.expr).expect("invalid ticket");
recp.ext().0.exprs.give_expr(expr);
this.0.exprs.take_expr(mov.expr);
}, },
api::ExtHostNotif::Log(api::Log(str)) => this.logger().log(str), api::ExtHostNotif::Log(api::Log(str)) => this.logger().log(str),
} }
@@ -178,10 +167,9 @@ impl Extension {
} }
hand.handle(&sl, &rep_out.next().await.unwrap()).await hand.handle(&sl, &rep_out.next().await.unwrap()).await
}, },
api::ExtHostReq::ExprReq(api::ExprReq::Inspect( api::ExtHostReq::ExprReq(expr_req) => match expr_req {
ins @ api::Inspect { target }, api::ExprReq::Inspect(ins @ api::Inspect { target }) => {
)) => { let expr = ctx.exprs.get_expr(target).expect("Invalid ticket");
let expr = this.exprs().get_expr(target).expect("Invalid ticket");
hand hand
.handle(&ins, &api::Inspected { .handle(&ins, &api::Inspected {
refcount: expr.strong_count() as u32, refcount: expr.strong_count() as u32,
@@ -190,6 +178,13 @@ impl Extension {
}) })
.await .await
}, },
api::ExprReq::Create(ref cre @ api::Create(ref expr)) => {
let expr = Expr::from_api(expr, PathSetBuilder::new(), ctx.clone()).await;
let expr_id = expr.id();
ctx.exprs.give_expr(expr);
hand.handle(cre, &expr_id).await
},
},
api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => { api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => {
let reply: <api::LsModule as Request>::Response = 'reply: { let reply: <api::LsModule as Request>::Response = 'reply: {
let path = i.ex(path).await; let path = i.ex(path).await;
@@ -264,8 +259,6 @@ impl Extension {
pub fn logger(&self) -> &Logger { &self.0.logger } pub fn logger(&self) -> &Logger { &self.0.logger }
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() } pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
#[must_use] #[must_use]
pub fn exprs(&self) -> &ExprStore { &self.0.exprs }
#[must_use]
pub async fn is_own_sys(&self, id: api::SysId) -> bool { pub async fn is_own_sys(&self, id: api::SysId) -> bool {
let Some(sys) = self.ctx().system_inst(id).await else { let Some(sys) = self.ctx().system_inst(id).await else {
writeln!(self.logger(), "Invalid system ID {id:?}"); writeln!(self.logger(), "Invalid system ID {id:?}");

View File

@@ -13,7 +13,7 @@ use orchid_base::tree::recur;
use crate::api; use crate::api;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::expr::{Expr, ExprParseCtx}; use crate::expr::Expr;
use crate::expr_store::ExprStore; use crate::expr_store::ExprStore;
use crate::parsed::{ParsTok, ParsTokTree, tt_to_api}; use crate::parsed::{ParsTok, ParsTokTree, tt_to_api};
use crate::system::System; use crate::system::System;
@@ -60,14 +60,7 @@ impl<'a> LexCtx<'a> {
} }
#[must_use] #[must_use]
pub async fn des_subtree(&mut self, tree: &api::TokenTree, exprs: ExprStore) -> ParsTokTree { pub async fn des_subtree(&mut self, tree: &api::TokenTree, exprs: ExprStore) -> ParsTokTree {
ParsTokTree::from_api( ParsTokTree::from_api(tree, &mut { exprs }, &mut self.ctx.clone(), self.path, &self.ctx.i).await
tree,
&mut { exprs },
&mut ExprParseCtx { ctx: self.ctx, exprs: &self.ctx.common_exprs },
self.path,
&self.ctx.i,
)
.await
} }
#[must_use] #[must_use]
pub fn strip_char(&mut self, tgt: char) -> bool { pub fn strip_char(&mut self, tgt: char) -> bool {
@@ -146,9 +139,9 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
let mut errors = Vec::new(); let mut errors = Vec::new();
if ctx.tail.starts_with(|c| sys.can_lex(c)) { if ctx.tail.starts_with(|c| sys.can_lex(c)) {
let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone()); let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone());
let temp_store = ctx.ctx.exprs.derive();
let ctx_lck = &Mutex::new(&mut *ctx); let ctx_lck = &Mutex::new(&mut *ctx);
let errors_lck = &Mutex::new(&mut errors); let errors_lck = &Mutex::new(&mut errors);
let temp_store = sys.ext().exprs().derive(true);
let temp_store_cb = temp_store.clone(); let temp_store_cb = temp_store.clone();
let lx = sys let lx = sys
.lex(source, path, pos, |pos| { .lex(source, path, pos, |pos| {

View File

@@ -1,4 +1,4 @@
use futures::future::join_all; use futures::FutureExt;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcRes, Reporter, mk_errv}; use orchid_base::error::{OrcRes, Reporter, mk_errv};
use orchid_base::format::fmt; use orchid_base::format::fmt;
@@ -51,9 +51,14 @@ pub async fn parse_items(
items: ParsSnippet<'_>, items: ParsSnippet<'_>,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
let lines = line_items(ctx, items).await; let lines = line_items(ctx, items).await;
let line_res = let mut line_ok = Vec::new();
join_all(lines.into_iter().map(|p| parse_item(ctx, path.clone(), p.output, p.tail))).await; for Parsed { output: comments, tail } in lines {
Ok(line_res.into_iter().flat_map(|l| l.ok().into_iter().flatten()).collect()) match parse_item(ctx, path.clone(), comments, tail).boxed_local().await {
Err(e) => ctx.rep().report(e),
Ok(l) => line_ok.extend(l),
}
}
Ok(line_ok)
} }
pub async fn parse_item( pub async fn parse_item(

View File

@@ -185,7 +185,7 @@ impl Tree for ParsedModule {
impl Format for ParsedModule { impl Format for ParsedModule {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
let head_str = format!("export ::({})\n", self.exports.iter().join(", ")); let head_str = format!("export ::({})\n", self.exports.iter().join(", "));
Variants::sequence(self.items.len() + 1, "\n", None).units( Variants::default().sequence(self.items.len() + 1, "", "\n", "", None).units_own(
[head_str.into()].into_iter().chain(join_all(self.items.iter().map(|i| i.print(c))).await), [head_str.into()].into_iter().chain(join_all(self.items.iter().map(|i| i.print(c))).await),
) )
} }

View File

@@ -11,7 +11,6 @@ use orchid_base::tree::ttv_from_api;
use substack::Substack; use substack::Substack;
use crate::api; use crate::api;
use crate::expr::ExprParseCtx;
use crate::expr_store::ExprStore; use crate::expr_store::ExprStore;
use crate::parse::HostParseCtx; use crate::parse::HostParseCtx;
use crate::parsed::{ use crate::parsed::{
@@ -35,7 +34,7 @@ impl Parser {
comments: Vec<Comment>, comments: Vec<Comment>,
callback: &mut impl AsyncFnMut(ModPath<'_>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>, callback: &mut impl AsyncFnMut(ModPath<'_>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
let mut temp_store = self.system.ext().exprs().derive(true); let mut temp_store = self.system.ctx().exprs.derive();
let src_path = line.first().expect("cannot be empty").sr.path(); let src_path = line.first().expect("cannot be empty").sr.path();
let line = let line =
join_all((line.into_iter()).map(|t| async { tt_to_api(&mut temp_store.clone(), t).await })) join_all((line.into_iter()).map(|t| async { tt_to_api(&mut temp_store.clone(), t).await }))
@@ -57,7 +56,6 @@ impl Parser {
i: self.system.i(), i: self.system.i(),
mod_path: &mod_path, mod_path: &mod_path,
ext_exprs: &mut temp_store, ext_exprs: &mut temp_store,
pctx: &mut ExprParseCtx { ctx: self.system.ctx(), exprs: self.system.ext().exprs() },
src_path: &src_path, src_path: &src_path,
sys: &self.system, sys: &self.system,
}) })
@@ -73,7 +71,6 @@ struct ConvCtx<'a> {
src_path: &'a Sym, src_path: &'a Sym,
i: &'a Interner, i: &'a Interner,
ext_exprs: &'a mut ExprStore, ext_exprs: &'a mut ExprStore,
pctx: &'a mut ExprParseCtx<'a>,
} }
async fn conv( async fn conv(
parsed_v: Vec<api::ParsedLine>, parsed_v: Vec<api::ParsedLine>,
@@ -87,19 +84,21 @@ async fn conv(
api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) => api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) =>
(name, exported, kind), (name, exported, kind),
api::ParsedLineKind::Recursive(rec) => { api::ParsedLineKind::Recursive(rec) => {
let tokens = ttv_from_api(rec, ctx.ext_exprs, ctx.pctx, ctx.src_path, ctx.i).await; let tokens =
ttv_from_api(rec, ctx.ext_exprs, &mut ctx.sys.ctx().clone(), ctx.src_path, ctx.i).await;
items.extend(callback(module.clone(), tokens).await?); items.extend(callback(module.clone(), tokens).await?);
continue; continue;
}, },
}; };
let name = ctx.i.ex(name).await; let name = ctx.i.ex(name).await;
let mem_path = module.push(name.clone());
let mkind = match kind { let mkind = match kind {
api::ParsedMemberKind::Module { lines, use_prelude } => { api::ParsedMemberKind::Module { lines, use_prelude } => {
let items = conv(lines, module.push(name.clone()), callback, ctx).boxed_local().await?; let items = conv(lines, mem_path, callback, ctx).boxed_local().await?;
ParsedMemberKind::Mod(ParsedModule::new(use_prelude, items)) ParsedMemberKind::Mod(ParsedModule::new(use_prelude, items))
}, },
api::ParsedMemberKind::Constant(cid) => { api::ParsedMemberKind::Constant(cid) => {
ctx.sys.0.const_paths.insert(cid, ctx.mod_path.suffix(module.unreverse(), ctx.i).await); ctx.sys.0.const_paths.insert(cid, ctx.mod_path.suffix(mem_path.unreverse(), ctx.i).await);
ParsedMemberKind::Const(cid, ctx.sys.clone()) ParsedMemberKind::Const(cid, ctx.sys.clone())
}, },
}; };

View File

@@ -3,9 +3,9 @@ use std::fmt;
use std::future::Future; use std::future::Future;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use async_lock::RwLock;
use derive_destructure::destructure; use derive_destructure::destructure;
use futures::future::join_all; use futures::future::join_all;
use futures_locks::RwLock;
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use memo_map::MemoMap; use memo_map::MemoMap;
@@ -163,6 +163,9 @@ impl System {
)), )),
None => (), None => (),
} }
if root_data.root.members.get(selector).is_some() {
return Ok(VName::new(rel.iter().cloned()).expect("split_first was called above"));
}
if tail.is_empty() { if tail.is_empty() {
return Ok(VPath::new(cwd.iter().cloned()).name_with_suffix(selector.clone())); return Ok(VPath::new(cwd.iter().cloned()).name_with_suffix(selector.clone()));
} }

View File

@@ -4,10 +4,10 @@ use std::cell::RefCell;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use std::slice; use std::slice;
use async_lock::RwLock;
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use derive_destructure::destructure; use derive_destructure::destructure;
use futures::{FutureExt, StreamExt, stream}; use futures::{FutureExt, StreamExt, stream};
use futures_locks::RwLock;
use hashbrown::HashMap; use hashbrown::HashMap;
use hashbrown::hash_map::Entry; use hashbrown::hash_map::Entry;
use itertools::Itertools; use itertools::Itertools;
@@ -22,7 +22,7 @@ use orchid_base::reqnot::Requester;
use crate::api; use crate::api;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::dealias::{ChildErrorKind, Tree, absolute_path, resolv_glob, walk}; use crate::dealias::{ChildErrorKind, Tree, absolute_path, resolv_glob, walk};
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder}; use crate::expr::{Expr, PathSetBuilder};
use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule}; use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule};
use crate::system::System; use crate::system::System;
@@ -90,8 +90,7 @@ impl Root {
for (path, (sys_id, pc_id)) in deferred_consts { for (path, (sys_id, pc_id)) in deferred_consts {
let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing"); let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
let api_expr = sys.reqnot().request(api::FetchParsedConst(sys.id(), pc_id)).await; let api_expr = sys.reqnot().request(api::FetchParsedConst(sys.id(), pc_id)).await;
let mut xp_ctx = ExprParseCtx { ctx: &this.ctx, exprs: sys.ext().exprs() }; let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), this.ctx.clone()).await;
let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), &mut xp_ctx).await;
new.0.write().await.consts.insert(path, expr); new.0.write().await.consts.insert(path, expr);
} }
new new
@@ -178,8 +177,7 @@ impl Module {
api::MemberKind::Lazy(id) => api::MemberKind::Lazy(id) =>
(Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None), (Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None),
api::MemberKind::Const(val) => { api::MemberKind::Const(val) => {
let mut expr_ctx = ExprParseCtx { ctx: ctx.sys.ctx(), exprs: ctx.sys.ext().exprs() }; let expr = Expr::from_api(&val, PathSetBuilder::new(), ctx.sys.ctx().clone()).await;
let expr = Expr::from_api(&val, PathSetBuilder::new(), &mut expr_ctx).await;
ctx.consts.insert(name.clone(), expr); ctx.consts.insert(name.clone(), expr);
(None, Some(MemberKind::Const)) (None, Some(MemberKind::Const))
}, },
@@ -463,8 +461,7 @@ impl LazyMemberHandle {
let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member"); let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member");
match sys.get_tree(self.id).await { match sys.get_tree(self.id).await {
api::MemberKind::Const(c) => { api::MemberKind::Const(c) => {
let mut pctx = ExprParseCtx { ctx: &ctx, exprs: sys.ext().exprs() }; let expr = Expr::from_api(&c, PathSetBuilder::new(), ctx.clone()).await;
let expr = Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await;
let (.., path) = self.destructure(); let (.., path) = self.destructure();
consts.insert(path, expr); consts.insert(path, expr);
MemberKind::Const MemberKind::Const

View File

@@ -4,6 +4,7 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
futures = { version = "0.3.31", features = ["std"], default-features = false } futures = { version = "0.3.31", features = ["std"], default-features = false }
hashbrown = "0.16.0" hashbrown = "0.16.0"
@@ -18,7 +19,9 @@ orchid-extension = { version = "0.1.0", path = "../orchid-extension", features =
"tokio", "tokio",
] } ] }
ordered-float = "5.0.0" ordered-float = "5.0.0"
rust_decimal = "1.37.2" pastey = "0.1.1"
rust_decimal = "1.38.0"
subslice-offset = "0.1.1"
substack = "1.1.1" substack = "1.1.1"
tokio = { version = "1.47.1", features = ["full"] } tokio = { version = "1.47.1", features = ["full"] }

View File

@@ -2,8 +2,12 @@ mod macros;
mod std; mod std;
pub use std::number::num_atom::{Float, HomoArray, Int, Num}; pub use std::number::num_atom::{Float, HomoArray, Int, Num};
pub use std::option::OrcOpt;
pub use std::reflection::sym_atom::{SymAtom, sym_expr};
pub use std::std_system::StdSystem; pub use std::std_system::StdSystem;
pub use std::string::str_atom::OrcString; pub use std::string::str_atom::OrcString;
pub use std::tuple::{HomoTpl, Tpl, Tuple, UntypedTuple};
pub use macros::macro_system::MacroSystem; pub use macros::macro_system::MacroSystem;
pub use macros::mactree::{MacTok, MacTree}; pub use macros::mactree::{MacTok, MacTree};
use orchid_api as api;

View File

@@ -1,13 +1,16 @@
use std::borrow::Cow; use std::borrow::Cow;
use never::Never; use never::Never;
use orchid_extension::atom::{Atomic, TypAtom}; use orchid_base::format::fmt;
use orchid_extension::atom::{Atomic, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own}; use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::conv::{ToExpr, TryFromExpr}; use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::expr::Expr; use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::GExpr; use orchid_extension::gen_expr::GExpr;
use crate::macros::mactree::{MacTok, MacTree, map_mactree}; use crate::macros::mactree::{MacTok, MacTree};
#[derive(Clone)] #[derive(Clone)]
pub struct InstantiateTplCall { pub struct InstantiateTplCall {
@@ -24,26 +27,33 @@ impl OwnedAtom for InstantiateTplCall {
type Refs = Never; type Refs = Never;
// Technically must be supported but shouldn't actually ever be called // Technically must be supported but shouldn't actually ever be called
async fn call_ref(&self, arg: Expr) -> GExpr { async fn call_ref(&self, arg: Expr) -> GExpr {
if !self.argv.is_empty() {
eprintln!( eprintln!(
"Copying partially applied instantiate_tpl call. This is an internal value.\ "Copying partially applied instantiate_tpl call. This is an internal value.\
\nIt should be fully consumed within generated code." \nIt should be fully consumed within generated code."
); );
}
self.clone().call(arg).await self.clone().call(arg).await
} }
async fn call(mut self, arg: Expr) -> GExpr { async fn call(mut self, arg: Expr) -> GExpr {
match TypAtom::<MacTree>::try_from_expr(arg).await { exec(async move |mut h| {
Err(e) => return Err::<Never, _>(e).to_expr().await, match h.exec::<TAtom<MacTree>>(arg.clone()).await {
Ok(t) => self.argv.push(own(t).await), Err(_) => panic!("Expected a macro param, found {}", fmt(&arg, &i()).await),
Ok(t) => self.argv.push(own(&t).await),
}; };
if self.argv.len() < self.argc { if self.argv.len() < self.argc {
return self.to_expr().await; return self.to_gen().await;
} }
let mut args = self.argv.into_iter(); let mut args = self.argv.into_iter();
let ret = map_mactree(&self.tpl, &mut false, &mut |mt| match mt.tok() { let ret = self.tpl.map(&mut false, &mut |mt| match mt.tok() {
MacTok::Slot => Some(args.next().expect("Not enough arguments to fill all slots")), MacTok::Slot => Some(args.next().expect("Not enough arguments to fill all slots")),
_ => None, _ => None,
}); });
assert!(args.next().is_none(), "Too many arguments for all slots"); assert!(args.next().is_none(), "Too many arguments for all slots");
ret.to_expr().await ret.to_gen().await
})
.await
.to_gen()
.await
} }
} }
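Editor's note: the reworked `call` body above fills every `Slot` in the stored template with the next queued argument and then asserts that the counts line up exactly. The same slot-filling shape, reduced to a self-contained sketch over a toy tree type (the names below are illustrative, not the crate's API):

```rust
#[derive(Debug, PartialEq)]
enum Tpl {
    Slot,           // to be replaced by the next queued argument
    Leaf(i32),      // an ordinary value
    Node(Vec<Tpl>), // a nested sequence
}

/// Replace every `Slot` in pre-order with the next item from `args`,
/// panicking if there are too few arguments (mirroring the expect/assert pair in the diff).
fn fill_slots(tpl: &Tpl, args: &mut dyn Iterator<Item = Tpl>) -> Tpl {
    match tpl {
        Tpl::Slot => args.next().expect("Not enough arguments to fill all slots"),
        Tpl::Leaf(n) => Tpl::Leaf(*n),
        Tpl::Node(items) => {
            let mut out = Vec::with_capacity(items.len());
            for item in items {
                out.push(fill_slots(item, args));
            }
            Tpl::Node(out)
        },
    }
}

fn main() {
    let tpl = Tpl::Node(vec![Tpl::Slot, Tpl::Leaf(1), Tpl::Node(vec![Tpl::Slot])]);
    let mut args = vec![Tpl::Leaf(10), Tpl::Leaf(20)].into_iter();
    let filled = fill_slots(&tpl, &mut args);
    assert!(args.next().is_none(), "Too many arguments for all slots");
    assert_eq!(
        filled,
        Tpl::Node(vec![Tpl::Leaf(10), Tpl::Leaf(1), Tpl::Node(vec![Tpl::Leaf(20)])])
    );
}
```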

View File

@@ -10,10 +10,14 @@ use orchid_base::parse::{
}; };
use orchid_base::sym; use orchid_base::sym;
use orchid_base::tree::Paren; use orchid_base::tree::Paren;
use orchid_extension::atom::TAtom;
use orchid_extension::context::i;
use orchid_extension::conv::TryFromExpr;
use orchid_extension::gen_expr::{atom, call, sym_ref}; use orchid_extension::gen_expr::{atom, call, sym_ref};
use orchid_extension::parser::{ConstCtx, PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, Parser}; use orchid_extension::parser::{ConstCtx, PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, Parser};
use crate::macros::mactree::{MacTok, MacTree, glossary_v, map_mactree_v}; use crate::macros::mactree::{MacTok, MacTree, MacTreeSeq};
use crate::macros::ph_lexer::PhAtom;
#[derive(Default)] #[derive(Default)]
pub struct LetLine; pub struct LetLine;
@@ -37,21 +41,18 @@ impl Parser for LetLine {
let aliased = parse_tokv(tail, &ctx).await; let aliased = parse_tokv(tail, &ctx).await;
Ok(vec![ParsedLine::cnst(&line.sr(), &comments, exported, name, async move |ctx| { Ok(vec![ParsedLine::cnst(&line.sr(), &comments, exported, name, async move |ctx| {
let rep = Reporter::new(); let rep = Reporter::new();
let dealiased = dealias_mac_v(aliased, &ctx, &rep).await; let macro_input =
let macro_input = MacTok::S(Paren::Round, dealiased).at(sr.pos()); MacTok::S(Paren::Round, dealias_mac_v(&aliased, &ctx, &rep).await).at(sr.pos());
if let Some(e) = rep.errv() { if let Some(e) = rep.errv() {
return Err(e); return Err(e);
} }
Ok(call([ Ok(call(sym_ref(sym!(macros::resolve; i())), [atom(macro_input)]))
sym_ref(sym!(macros::lower; ctx.i()).await),
call([sym_ref(sym!(macros::resolve; ctx.i()).await), atom(macro_input)]),
]))
})]) })])
} }
} }
pub async fn dealias_mac_v(aliased: Vec<MacTree>, ctx: &ConstCtx, rep: &Reporter) -> Vec<MacTree> { pub async fn dealias_mac_v(aliased: &MacTreeSeq, ctx: &ConstCtx, rep: &Reporter) -> MacTreeSeq {
let keys = glossary_v(&aliased).collect_vec(); let keys = aliased.glossary().iter().cloned().collect_vec();
let mut names: HashMap<_, _> = HashMap::new(); let mut names: HashMap<_, _> = HashMap::new();
let mut stream = pin!(ctx.names(&keys).zip(stream::iter(&keys))); let mut stream = pin!(ctx.names(&keys).zip(stream::iter(&keys)));
while let Some((canonical, local)) = stream.next().await { while let Some((canonical, local)) = stream.next().await {
@@ -62,13 +63,13 @@ pub async fn dealias_mac_v(aliased: Vec<MacTree>, ctx: &ConstCtx, rep: &Reporter
}, },
} }
} }
map_mactree_v(&aliased, &mut false, &mut |tree| match &*tree.tok { aliased.map(&mut false, &mut |tree| match &*tree.tok {
MacTok::Name(n) => names.get(n).map(|new_n| MacTok::Name(new_n.clone()).at(tree.pos())), MacTok::Name(n) => names.get(n).map(|new_n| MacTok::Name(new_n.clone()).at(tree.pos())),
_ => None, _ => None,
}) })
} }
pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> Vec<MacTree> { pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> MacTreeSeq {
if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) { if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) {
let (head, lambda) = line.split_at(idx as u32); let (head, lambda) = line.split_at(idx as u32);
let (_, body) = lambda.pop_front().unwrap(); let (_, body) = lambda.pop_front().unwrap();
@@ -83,14 +84,14 @@ pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> Vec<MacTree>
.await, .await,
), ),
}; };
all MacTreeSeq::new(all)
} else { } else {
parse_tokv_no_lambdas(&line, ctx).await MacTreeSeq::new(parse_tokv_no_lambdas(&line, ctx).await)
} }
} }
async fn parse_tokv_no_lambdas(line: &[PTokTree], ctx: &impl ParseCtx) -> Vec<MacTree> { async fn parse_tokv_no_lambdas(line: &[PTokTree], ctx: &impl ParseCtx) -> Vec<MacTree> {
stream::iter(line).filter_map(|tt| parse_tok(tt, ctx)).collect().await stream::iter(line).filter_map(|tt| parse_tok(tt, ctx)).collect::<Vec<_>>().await
} }
pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree> { pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree> {
@@ -110,7 +111,10 @@ pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree>
return parse_tok(nested, ctx).boxed_local().await; return parse_tok(nested, ctx).boxed_local().await;
}, },
}, },
PTok::Handle(expr) => MacTok::Value(expr.clone()), PTok::Handle(expr) => match TAtom::<PhAtom>::try_from_expr(expr.clone()).await {
Err(_) => MacTok::Value(expr.clone()),
Ok(ta) => MacTok::Ph(ta.value.to_full().await),
},
PTok::NewExpr(never) => match *never {}, PTok::NewExpr(never) => match *never {},
PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"), PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"),
PTok::S(p, body) => PTok::S(p, body) =>
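Editor's note: `parse_tokv` keeps its earlier strategy, now returning a `MacTreeSeq`: tokens before the first lambda head are parsed flat, and everything after it is folded into that lambda's body by recursing on the tail. A stripped-down sketch of that split-and-nest step with toy token types (illustrative only, not the crate's parser types):

```rust
#[derive(Debug)]
enum Tok {
    Word(String),
    LambdaHead(String), // a "\x." style binder
}

#[derive(Debug)]
enum Node {
    Word(String),
    Lambda { arg: String, body: Vec<Node> },
}

/// Everything before the first lambda head parses flat; the rest becomes its body.
fn parse_seq(toks: &[Tok]) -> Vec<Node> {
    if let Some(idx) = toks.iter().position(|t| matches!(t, Tok::LambdaHead(_))) {
        let (head, rest) = toks.split_at(idx);
        let Tok::LambdaHead(arg) = &rest[0] else { unreachable!() };
        let mut out: Vec<Node> = head.iter().map(word).collect();
        out.push(Node::Lambda { arg: arg.clone(), body: parse_seq(&rest[1..]) });
        out
    } else {
        toks.iter().map(word).collect()
    }
}

fn word(t: &Tok) -> Node {
    match t {
        Tok::Word(w) => Node::Word(w.clone()),
        Tok::LambdaHead(_) => unreachable!("handled by parse_seq"),
    }
}

fn main() {
    let toks = vec![
        Tok::Word("f".into()),
        Tok::LambdaHead("x".into()),
        Tok::Word("x".into()),
        Tok::Word("x".into()),
    ];
    // "f" stays flat, the lambda swallows the two remaining tokens as its body.
    let parsed = parse_seq(&toks);
    assert_eq!(parsed.len(), 2);
}
```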

View File

@@ -1,83 +1,65 @@
use hashbrown::HashMap;
use itertools::Itertools;
use orchid_base::error::Reporter;
use orchid_base::sym; use orchid_base::sym;
use orchid_extension::atom::TypAtom; use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own; use orchid_extension::atom_owned::own;
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr; use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec; use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::gen_expr::{atom, call, sym_ref}; use orchid_extension::tree::{GenMember, fun, prefix};
use orchid_extension::reflection::{ReflMemKind, refl};
use orchid_extension::tree::{GenMember, comments, fun, prefix};
use substack::Substack;
use crate::Int; use crate::macros::mactree::MacTree;
use crate::macros::instantiate_tpl::InstantiateTplCall; use crate::macros::resolve::resolve;
use crate::macros::macro_line::{Macro, Matcher}; use crate::macros::utils::{build_macro, mactree, mactreev};
use crate::macros::mactree::{LowerCtx, MacTree};
use crate::macros::recur_state::RecurState;
use crate::macros::resolve::{ResolveCtx, resolve};
pub fn gen_macro_lib() -> Vec<GenMember> { pub async fn gen_macro_lib() -> Vec<GenMember> {
prefix("macros", [ prefix("macros", [
comments( fun(true, "resolve", async |tpl: TAtom<MacTree>| resolve(own(&tpl).await).await),
["This is an internal function, you can't obtain a value of its argument type.", "hidden"], // TODO test whether any of this worked
fun(true, "instantiate_tpl", |tpl: TypAtom<MacTree>, right: Int| async move { prefix("common", [
InstantiateTplCall { build_macro(None, ["..", "_"]).finish(),
tpl: own(tpl).await, build_macro(Some(1), ["+"])
argc: right.0.try_into().unwrap(), .rule(mactreev!("...$" lhs 0 macros::common::+ "...$" rhs 1), [async |[lhs, rhs]| {
argv: Vec::new(), call(sym_ref(sym!(std::number::add; i())), [resolve(lhs).await, resolve(rhs).await])
} }])
}), .finish(),
), build_macro(Some(2), ["*"])
fun(true, "resolve", |tpl: TypAtom<MacTree>| async move { .rule(mactreev!("...$" lhs 0 macros::common::* "...$" rhs 1), [async |[lhs, rhs]| {
call([ call(sym_ref(sym!(std::number::mul; i())), [resolve(lhs).await, resolve(rhs).await])
sym_ref(sym!(macros::resolve_recur; tpl.untyped.ctx().i()).await), }])
atom(RecurState::Bottom), .finish(),
tpl.untyped.ex().to_expr().await, build_macro(None, ["comma_list", ","])
.rule(
mactreev!(macros::common::comma_list ( "...$" head 0 macros::common::, "...$" tail 1)),
[async |[head, tail]| {
call(sym_ref(sym!(std::tuple::cat; i())), [
call(sym_ref(sym!(std::tuple::one; i())), [head.to_gen().await]),
resolve(mactree!(macros::common::comma_list "push" tail ;)).await,
]) ])
}), }],
fun(true, "lower", |tpl: TypAtom<MacTree>| async move { )
let ctx = LowerCtx { sys: tpl.untyped.ctx().clone(), rep: &Reporter::new() }; .rule(mactreev!(macros::common::comma_list ( "...$" final_tail 0 )), [async |[tail]| {
let res = own(tpl).await.lower(ctx, Substack::Bottom).await; call(sym_ref(sym!(std::tuple::one; i())), [tail.to_gen().await])
if let Some(e) = Reporter::new().errv() { Err(e) } else { Ok(res) } }])
}), .rule(mactreev!(macros::common::comma_list()), [async |[]| {
fun(true, "resolve_recur", |state: TypAtom<RecurState>, tpl: TypAtom<MacTree>| async move { sym_ref(sym!(std::tuple::empty; i()))
exec("macros::resolve_recur", async move |mut h| { }])
let ctx = tpl.ctx().clone(); .finish(),
let root = refl(&ctx); build_macro(None, ["semi_list", ";"])
let tpl = own(tpl.clone()).await; .rule(
let mut macros = HashMap::new(); mactreev!(macros::common::semi_list ( "...$" head 0 macros::common::; "...$" tail 1)),
for n in tpl.glossary() { [async |[head, tail]| {
if let Ok(ReflMemKind::Const) = root.get_by_path(n).await.map(|m| m.kind()) { call(sym_ref(sym!(std::tuple::cat; i())), [
let Ok(mac) = h.exec::<TypAtom<Macro>>(sym_ref(n.clone())).await else { continue }; call(sym_ref(sym!(std::tuple::one; i())), [resolve(head).await]),
let mac = own(mac).await; resolve(mactree!(macros::common::semi_list "push" tail ;)).await,
macros.entry(mac.0.own_kws[0].clone()).or_insert(mac); ])
} }],
} )
let mut named = HashMap::new(); .rule(mactreev!(macros::common::semi_list ( "...$" final_tail 0 )), [async |[tail]| {
let mut priod = Vec::new(); call(sym_ref(sym!(std::tuple::one; i())), [resolve(tail).await])
for (_, mac) in macros.iter() { }])
for rule in mac.0.rules.iter() { .rule(mactreev!(macros::common::semi_list()), [async |[]| {
if rule.glossary.is_subset(tpl.glossary()) { sym_ref(sym!(std::tuple::empty; i()))
match &rule.pattern { }])
Matcher::Named(m) => .finish(),
named.entry(m.head()).or_insert(Vec::new()).push((m, mac, rule)), ]),
Matcher::Priod(p) => priod.push((mac.0.prio, (p, mac, rule))),
}
}
}
}
let priod = priod.into_iter().sorted_unstable_by_key(|(p, _)| *p).map(|(_, r)| r).collect();
let mut rctx = ResolveCtx { h, recur: own(state).await, ctx: ctx.clone(), named, priod };
let resolve_res = resolve(&mut rctx, &tpl).await;
std::mem::drop(rctx);
match resolve_res {
Some(out_tree) => out_tree.to_expr().await,
None => tpl.to_expr().await,
}
})
.await
}),
]) ])
} }
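Editor's note: the `comma_list` and `semi_list` rules each peel one element per rewrite, emitting `std::tuple::cat(std::tuple::one(head), <tail>)`, with a single trailing element becoming `std::tuple::one(tail)` and the empty case `std::tuple::empty`. The right-nested shape they produce can be sketched with plain strings standing in for generated expressions (the names `cat`, `one` and `empty` below are placeholders for the generated calls, not callable APIs):

```rust
/// Build the right-nested expansion a comma_list-style rule produces,
/// e.g. ["a", "b", "c"] -> cat(one(a), cat(one(b), one(c))).
fn expand(items: &[&str]) -> String {
    match items.split_last() {
        None => "empty".to_string(),
        Some((last, init)) => init
            .iter()
            .rev()
            .fold(format!("one({last})"), |tail, item| format!("cat(one({item}), {tail})")),
    }
}

fn main() {
    assert_eq!(expand(&[]), "empty");
    assert_eq!(expand(&["a"]), "one(a)");
    assert_eq!(expand(&["a", "b", "c"]), "cat(one(a), cat(one(b), one(c)))");
}
```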

View File

@@ -1,32 +1,26 @@
use std::borrow::Cow;
use std::cell::RefCell; use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use futures::{StreamExt, stream}; use futures::{StreamExt, stream};
use hashbrown::{HashMap, HashSet};
use itertools::Itertools; use itertools::Itertools;
use never::Never;
use orchid_base::error::{OrcRes, Reporter, mk_errv}; use orchid_base::error::{OrcRes, Reporter, mk_errv};
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
use orchid_base::parse::{ use orchid_base::parse::{
Comment, ParseCtx, Parsed, Snippet, expect_end, expect_tok, line_items, token_errv, Comment, ParseCtx, Parsed, Snippet, expect_end, expect_tok, line_items, token_errv,
try_pop_no_fluff, try_pop_no_fluff,
}; };
use orchid_base::tree::{Paren, Token}; use orchid_base::tree::{Paren, Token};
use orchid_base::{clone, sym}; use orchid_base::{clone, sym};
use orchid_extension::atom::{Atomic, TypAtom}; use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant}; use orchid_extension::context::i;
use orchid_extension::conv::{ToExpr, TryFromExpr}; use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::gen_expr::{atom, call, sym_ref}; use orchid_extension::gen_expr::{atom, call, sym_ref};
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser}; use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
use crate::macros::let_line::{dealias_mac_v, parse_tokv}; use crate::macros::let_line::{dealias_mac_v, parse_tokv};
use crate::macros::mactree::{glossary_v, map_mactree_v}; use crate::macros::macro_value::{Macro, MacroData, Rule};
use crate::macros::recur_state::{RecurState, RulePath}; use crate::macros::mactree::MacTreeSeq;
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher}; use crate::macros::rule::matcher::Matcher;
use crate::{Int, MacTok}; use crate::{Int, MacTok};
#[derive(Default)] #[derive(Default)]
@@ -48,42 +42,46 @@ impl Parser for MacroLine {
)); ));
} }
let module = ctx.module(); let module = ctx.module();
let Parsed { output, tail } = try_pop_no_fluff(&ctx, line).await?; let Parsed { output: prio_or_body, tail } = try_pop_no_fluff(&ctx, line).await?;
let bad_first_item_err = || { let bad_first_item_err = || {
token_errv(&ctx, output, "Expected priority or block", |s| { token_errv(&ctx, prio_or_body, "Expected priority or block", |s| {
format!("Expected a priority number or a () block, found {s}") format!("Expected a priority number or a () block, found {s}")
}) })
}; };
let (prio, body) = match &output.tok { let (prio, body) = match &prio_or_body.tok {
Token::S(Paren::Round, body) => (None, body), Token::S(Paren::Round, body) => {
Token::Handle(expr) => match TypAtom::<Int>::try_from_expr(expr.clone()).await { expect_end(&ctx, tail).await?;
(None, body)
},
Token::Handle(expr) => match TAtom::<Int>::try_from_expr(expr.clone()).await {
Err(e) => { Err(e) => {
return Err(e + bad_first_item_err().await); return Err(e + bad_first_item_err().await);
}, },
Ok(prio) => { Ok(prio) => {
let Token::S(Paren::Round, block) = &output.tok else { let Parsed { output: body, tail } = try_pop_no_fluff(&ctx, tail).await?;
let Token::S(Paren::Round, block) = &body.tok else {
return Err( return Err(
token_errv(&ctx, output, "Expected () block", |s| { token_errv(&ctx, prio_or_body, "Expected () block", |s| {
format!("Expected a () block, found {s}") format!("Expected a () block, found {s}")
}) })
.await, .await,
); );
}; };
expect_end(&ctx, tail).await?;
(Some(prio), block) (Some(prio), block)
}, },
}, },
_ => return Err(bad_first_item_err().await), _ => return Err(bad_first_item_err().await),
}; };
expect_end(&ctx, tail).await?; let lines = line_items(&ctx, Snippet::new(prio_or_body, body)).await;
let lines = line_items(&ctx, Snippet::new(output, body)).await;
let Some((kw_line, rule_lines)) = lines.split_first() else { return Ok(Vec::new()) }; let Some((kw_line, rule_lines)) = lines.split_first() else { return Ok(Vec::new()) };
let mut keywords = HashMap::new(); let mut keywords = Vec::new();
let Parsed { tail: kw_tail, .. } = let Parsed { tail: kw_tail, .. } =
expect_tok(&ctx, kw_line.tail, ctx.i().i("keywords").await).await?; expect_tok(&ctx, kw_line.tail, ctx.i().i("keywords").await).await?;
for kw_tok in kw_tail.iter().filter(|kw| !kw.is_fluff()) { for kw_tok in kw_tail.iter().filter(|kw| !kw.is_fluff()) {
match kw_tok.as_name() { match kw_tok.as_name() {
Some(kw) => { Some(kw) => {
keywords.insert(kw, kw_tok.sr()); keywords.push((kw, kw_tok.sr()));
}, },
None => ctx.rep().report( None => ctx.rep().report(
token_errv(&ctx, kw_tok, "invalid macro keywords list", |tok| { token_errv(&ctx, kw_tok, "invalid macro keywords list", |tok| {
@@ -93,7 +91,7 @@ impl Parser for MacroLine {
), ),
} }
} }
let Some(macro_name) = keywords.keys().next().cloned() else { let Some((macro_name, _)) = keywords.first().cloned() else {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("macro with no keywords").await, ctx.i().i("macro with no keywords").await,
"Macros must define at least one macro of their own.", "Macros must define at least one macro of their own.",
@@ -103,9 +101,8 @@ impl Parser for MacroLine {
let mut rules = Vec::new(); let mut rules = Vec::new();
let mut lines = Vec::new(); let mut lines = Vec::new();
for (idx, line) in rule_lines.iter().enumerate().map(|(n, v)| (n as u32, v)) { for (idx, line) in rule_lines.iter().enumerate().map(|(n, v)| (n as u32, v)) {
let path = RulePath { module: module.clone(), main_kw: macro_name.clone(), rule: idx };
let sr = line.tail.sr(); let sr = line.tail.sr();
let name = ctx.i().i(&path.name()).await; let name = ctx.i().i(&format!("rule::{}::{}", macro_name, idx)).await;
let Parsed { tail, .. } = expect_tok(&ctx, line.tail, ctx.i().i("rule").await).await?; let Parsed { tail, .. } = expect_tok(&ctx, line.tail, ctx.i().i("rule").await).await?;
let arrow_token = ctx.i().i("=>").await; let arrow_token = ctx.i().i("=>").await;
let Some((pattern, body)) = tail.split_once(|tok| tok.is_kw(arrow_token.clone())) else { let Some((pattern, body)) = tail.split_once(|tok| tok.is_kw(arrow_token.clone())) else {
@@ -118,7 +115,7 @@ impl Parser for MacroLine {
}; };
let pattern = parse_tokv(pattern, &ctx).await; let pattern = parse_tokv(pattern, &ctx).await;
let mut placeholders = Vec::new(); let mut placeholders = Vec::new();
map_mactree_v(&pattern, &mut false, &mut |tok| { pattern.map(&mut false, &mut |tok| {
if let MacTok::Ph(ph) = tok.tok() { if let MacTok::Ph(ph) = tok.tok() {
placeholders.push((ph.clone(), tok.pos())) placeholders.push((ph.clone(), tok.pos()))
} }
@@ -127,52 +124,42 @@ impl Parser for MacroLine {
let mut body_mactree = parse_tokv(body, &ctx).await; let mut body_mactree = parse_tokv(body, &ctx).await;
for (ph, ph_pos) in placeholders.iter().rev() { for (ph, ph_pos) in placeholders.iter().rev() {
let name = ctx.module().suffix([ph.name.clone()], ctx.i()).await; let name = ctx.module().suffix([ph.name.clone()], ctx.i()).await;
body_mactree = vec![ body_mactree =
MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone()), MacTreeSeq::new([
] MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone())
])
} }
let body_sr = body.sr(); let body_sr = body.sr();
rules.push((name.clone(), placeholders, rules.len() as u32, sr.pos(), pattern)); rules.push((name.clone(), placeholders, pattern));
lines.push(ParsedLine::cnst(&sr, &line.output, true, name, async move |ctx| { lines.push(ParsedLine::cnst(&sr, &line.output, true, name, async move |ctx| {
let rep = Reporter::new(); let rep = Reporter::new();
let body = dealias_mac_v(body_mactree, &ctx, &rep).await; let body = dealias_mac_v(&body_mactree, &ctx, &rep).await;
let macro_input = MacTok::S(Paren::Round, body).at(body_sr.pos()); let macro_input = MacTok::S(Paren::Round, body).at(body_sr.pos());
if let Some(e) = rep.errv() { if let Some(e) = rep.errv() {
return Err(e); return Err(e);
} }
Ok(call([ Ok(call(sym_ref(sym!(macros::resolve; i())), [macro_input.to_gen().await]))
sym_ref(sym!(macros::resolve_recur; ctx.i()).await),
atom(RecurState::base(path)),
macro_input.to_expr().await,
]))
})) }))
} }
let mac_cell = Rc::new(OnceCell::new()); let mac_cell = Rc::new(OnceCell::new());
let keywords = Rc::new(keywords);
let rules = Rc::new(RefCell::new(Some(rules))); let rules = Rc::new(RefCell::new(Some(rules)));
for (kw, sr) in &*keywords { for (kw, sr) in &*keywords {
clone!(mac_cell, keywords, rules, module, prio); clone!(mac_cell, rules, module, prio);
lines.push(ParsedLine::cnst(&sr.clone(), &comments, true, kw.clone(), async move |cctx| { lines.push(ParsedLine::cnst(&sr.clone(), &comments, true, kw.clone(), async move |cctx| {
let mac = mac_cell let mac = mac_cell
.get_or_init(async { .get_or_init(async {
let rep = Reporter::new(); let rep = Reporter::new();
let rules = rules.borrow_mut().take().expect("once cell initializer runs"); let rules = rules.borrow_mut().take().expect("once cell initializer runs");
let rules = stream::iter(rules) let rules = stream::iter(rules)
.then(|(body_name, placeholders, index, pos, pattern_macv)| { .then(|(body_name, placeholders, pattern_rel)| {
let cctx = &cctx; let cctx = &cctx;
let rep = &rep; let rep = &rep;
let prio = &prio;
async move { async move {
let pattern_abs = dealias_mac_v(pattern_macv, cctx, rep).await; let pattern = dealias_mac_v(&pattern_rel, cctx, rep).await;
let glossary = glossary_v(&pattern_abs).collect(); let pattern_res = Matcher::new(pattern.clone()).await;
let pattern_res = match prio {
None => NamedMatcher::new(&pattern_abs, cctx.i()).await.map(Matcher::Named),
Some(_) => PriodMatcher::new(&pattern_abs, cctx.i()).await.map(Matcher::Priod),
};
let placeholders = placeholders.into_iter().map(|(ph, _)| ph.name).collect_vec(); let placeholders = placeholders.into_iter().map(|(ph, _)| ph.name).collect_vec();
match pattern_res { match pattern_res {
Ok(pattern) => Ok(matcher) => Some(Rule { body_name, matcher, pattern, placeholders }),
Some(Rule { index, pos, body_name, pattern, glossary, placeholders }),
Err(e) => { Err(e) => {
rep.report(e); rep.report(e);
None None
@@ -183,8 +170,7 @@ impl Parser for MacroLine {
.flat_map(stream::iter) .flat_map(stream::iter)
.collect::<Vec<_>>() .collect::<Vec<_>>()
.await; .await;
let own_kws = keywords.keys().cloned().collect_vec(); Macro(Rc::new(MacroData { module, prio: prio.map(|i| i.0 as u64), rules }))
Macro(Rc::new(MacroData { module, prio: prio.map(|i| i.0 as u64), rules, own_kws }))
}) })
.await; .await;
atom(mac.clone()) atom(mac.clone())
@@ -193,36 +179,3 @@ impl Parser for MacroLine {
Ok(lines) Ok(lines)
} }
} }
#[derive(Debug)]
pub struct MacroData {
pub module: Sym,
pub prio: Option<u64>,
pub rules: Vec<Rule>,
pub own_kws: Vec<Tok<String>>,
}
#[derive(Clone, Debug)]
pub struct Macro(pub Rc<MacroData>);
#[derive(Debug)]
pub struct Rule {
pub index: u32,
pub pos: Pos,
pub pattern: Matcher,
pub glossary: HashSet<Sym>,
pub placeholders: Vec<Tok<String>>,
pub body_name: Tok<String>,
}
#[derive(Debug)]
pub enum Matcher {
Named(NamedMatcher),
Priod(PriodMatcher),
}
impl Atomic for Macro {
type Data = ();
type Variant = OwnedVariant;
}
impl OwnedAtom for Macro {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
}
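Editor's note: every keyword constant generated above clones the same `Rc<OnceCell>` and `Rc<RefCell<Option<...>>>`, so whichever constant is evaluated first builds the `Macro` and takes the rule list; later evaluations reuse the cached value. The synchronous std equivalent of that sharing pattern is sketched below (`std::cell::OnceCell` standing in for the async cell, toy data throughout):

```rust
use std::cell::{OnceCell, RefCell};
use std::rc::Rc;

#[derive(Debug)]
struct Macro {
    rules: Vec<String>,
}

fn main() {
    let mac_cell: Rc<OnceCell<Rc<Macro>>> = Rc::new(OnceCell::new());
    // The raw rule list is only needed once, so it lives in an Option
    // and is taken by whichever initializer runs first.
    let rules = Rc::new(RefCell::new(Some(vec!["rule::+::0".to_string()])));

    // One getter per exported keyword; all of them share the same cells.
    let make_getter =
        |mac_cell: Rc<OnceCell<Rc<Macro>>>, rules: Rc<RefCell<Option<Vec<String>>>>| {
            move || -> Rc<Macro> {
                mac_cell
                    .get_or_init(|| {
                        let rules = rules.borrow_mut().take().expect("once cell initializer runs");
                        Rc::new(Macro { rules })
                    })
                    .clone()
            }
        };
    let get_plus = make_getter(mac_cell.clone(), rules.clone());
    let get_star = make_getter(mac_cell.clone(), rules.clone());

    let a = get_plus(); // builds the Macro and consumes the rule list
    let b = get_star(); // reuses the cached Macro
    assert!(Rc::ptr_eq(&a, &b));
    assert_eq!(a.rules, ["rule::+::0"]);
}
```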

View File

@@ -1,22 +1,27 @@
use never::Never; use never::Never;
use orchid_base::interner::Interner;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::Receipt; use orchid_base::reqnot::Receipt;
use orchid_base::sym;
use orchid_extension::atom::{AtomDynfo, AtomicFeatures}; use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
use orchid_extension::context::i;
use orchid_extension::entrypoint::ExtReq; use orchid_extension::entrypoint::ExtReq;
use orchid_extension::lexer::LexerObj; use orchid_extension::lexer::LexerObj;
use orchid_extension::other_system::SystemHandle; use orchid_extension::other_system::SystemHandle;
use orchid_extension::parser::ParserObj; use orchid_extension::parser::ParserObj;
use orchid_extension::system::{System, SystemCard}; use orchid_extension::system::{System, SystemCard};
use orchid_extension::system_ctor::SystemCtor; use orchid_extension::system_ctor::SystemCtor;
use orchid_extension::tree::GenMember; use orchid_extension::tree::{GenMember, merge_trivial};
use crate::macros::instantiate_tpl::InstantiateTplCall; use crate::macros::instantiate_tpl::InstantiateTplCall;
use crate::macros::let_line::LetLine; use crate::macros::let_line::LetLine;
use crate::macros::macro_lib::gen_macro_lib; use crate::macros::macro_lib::gen_macro_lib;
use crate::macros::macro_line::{Macro, MacroLine}; use crate::macros::macro_line::MacroLine;
use crate::macros::macro_value::Macro;
use crate::macros::mactree_lexer::MacTreeLexer; use crate::macros::mactree_lexer::MacTreeLexer;
use crate::macros::recur_state::RecurState; use crate::macros::match_macros::gen_match_macro_lib;
use crate::macros::ph_lexer::{PhAtom, PhLexer};
use crate::macros::std_macros::gen_std_macro_lib;
use crate::macros::utils::MacroBodyArgCollector;
use crate::{MacTree, StdSystem}; use crate::{MacTree, StdSystem};
#[derive(Default)] #[derive(Default)]
@@ -35,15 +40,25 @@ impl SystemCard for MacroSystem {
[ [
Some(InstantiateTplCall::dynfo()), Some(InstantiateTplCall::dynfo()),
Some(MacTree::dynfo()), Some(MacTree::dynfo()),
Some(RecurState::dynfo()),
Some(Macro::dynfo()), Some(Macro::dynfo()),
Some(PhAtom::dynfo()),
Some(MacroBodyArgCollector::dynfo()),
] ]
} }
} }
impl System for MacroSystem { impl System for MacroSystem {
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} } async fn request(_: ExtReq<'_>, req: Never) -> Receipt<'_> { match req {} }
async fn prelude(_: &Interner) -> Vec<Sym> { vec![] } async fn prelude() -> Vec<Sym> {
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer] } vec![
sym!(macros::common::+; i()),
sym!(macros::common::*; i()),
sym!(macros::common::,; i()),
sym!(std::tuple::t; i()),
]
}
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer, &PhLexer] }
fn parsers() -> Vec<ParserObj> { vec![&LetLine, &MacroLine] } fn parsers() -> Vec<ParserObj> { vec![&LetLine, &MacroLine] }
fn env() -> Vec<GenMember> { gen_macro_lib() } async fn env() -> Vec<GenMember> {
merge_trivial([gen_macro_lib().await, gen_std_macro_lib().await, gen_match_macro_lib().await])
}
} }
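Editor's note: `env` now merges three generated trees with `merge_trivial`, whose definition is not part of this diff. As a rough illustration of what a collision-free merge of name-keyed module trees involves, here is a generic sketch with toy types (an assumption about the semantics, not the crate's implementation):

```rust
use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum Node {
    Leaf(i32),
    Module(HashMap<String, Node>),
}

/// Merge module trees, recursing into same-named modules and
/// rejecting any leaf that appears in more than one input.
fn merge_trivial(trees: impl IntoIterator<Item = HashMap<String, Node>>) -> HashMap<String, Node> {
    let mut out: HashMap<String, Node> = HashMap::new();
    for tree in trees {
        for (name, node) in tree {
            match (out.remove(&name), node) {
                (None, node) => {
                    out.insert(name, node);
                },
                (Some(Node::Module(a)), Node::Module(b)) => {
                    out.insert(name, Node::Module(merge_trivial([a, b])));
                },
                _ => panic!("conflicting definitions for {name}"),
            }
        }
    }
    out
}

fn main() {
    let lib1 = HashMap::from([(
        "macros".to_string(),
        Node::Module(HashMap::from([("resolve".to_string(), Node::Leaf(1))])),
    )]);
    let lib2 = HashMap::from([(
        "macros".to_string(),
        Node::Module(HashMap::from([("common".to_string(), Node::Leaf(2))])),
    )]);
    let merged = merge_trivial([lib1, lib2]);
    let Node::Module(inner) = &merged["macros"] else { panic!() };
    assert_eq!(inner.len(), 2); // "resolve" and "common" end up in one module
}
```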

View File

@@ -0,0 +1,43 @@
use std::borrow::Cow;
use std::rc::Rc;
use never::Never;
use orchid_base::interner::Tok;
use orchid_base::name::Sym;
use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
use orchid_extension::context::i;
use crate::macros::mactree::MacTreeSeq;
use crate::macros::rule::matcher::Matcher;
#[derive(Debug)]
pub struct MacroData {
pub module: Sym,
pub prio: Option<u64>,
pub rules: Vec<Rule>,
}
#[derive(Clone, Debug)]
pub struct Macro(pub Rc<MacroData>);
impl Macro {
pub async fn canonical_name(&self) -> Sym {
self.0.module.suffix([self.0.rules[0].body_name.clone()], &i()).await
}
}
#[derive(Debug)]
pub struct Rule {
pub pattern: MacTreeSeq,
pub matcher: Matcher,
pub placeholders: Vec<Tok<String>>,
pub body_name: Tok<String>,
}
impl Atomic for Macro {
type Data = ();
type Variant = OwnedVariant;
}
impl OwnedAtom for Macro {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
}

View File

@@ -5,9 +5,9 @@ use std::rc::Rc;
use futures::FutureExt; use futures::FutureExt;
use futures::future::join_all; use futures::future::join_all;
use hashbrown::HashSet; use hashbrown::HashSet;
use itertools::Itertools; use orchid_api_derive::Coding;
use orchid_base::error::{OrcErrv, Reporter, mk_errv}; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants, fmt}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
@@ -15,16 +15,89 @@ use orchid_base::tl_cache;
use orchid_base::tree::{Paren, indent}; use orchid_base::tree::{Paren, indent};
use orchid_extension::atom::Atomic; use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant}; use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
use orchid_extension::conv::ToExpr;
use orchid_extension::expr::Expr; use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref};
use orchid_extension::system::SysCtx;
use substack::Substack;
#[derive(Clone)] fn union_rc_sets(seq: impl IntoIterator<Item = Rc<HashSet<Sym>>>) -> Rc<HashSet<Sym>> {
pub struct LowerCtx<'a> { let mut acc = Rc::<HashSet<Sym>>::default();
pub sys: SysCtx, for right in seq {
pub rep: &'a Reporter, if acc.is_empty() {
acc = right;
continue;
}
if right.is_empty() {
continue;
}
acc = match (Rc::try_unwrap(acc), Rc::try_unwrap(right)) {
(Ok(mut left), Ok(right)) => {
left.extend(right);
Rc::new(left)
},
(Ok(mut owned), Err(borrowed)) | (Err(borrowed), Ok(mut owned)) => {
owned.extend(borrowed.iter().cloned());
Rc::new(owned)
},
(Err(left), Err(right)) => Rc::new(left.union(&right).cloned().collect()),
}
}
acc
}
#[derive(Debug, Clone)]
pub struct MacTreeSeq {
pub items: Rc<Vec<MacTree>>,
pub top_glossary: Rc<HashSet<Sym>>,
pub glossary: Rc<HashSet<Sym>>,
}
impl MacTreeSeq {
pub fn new(i: impl IntoIterator<Item = MacTree>) -> Self {
let mut items = Vec::new();
let mut top_glossary = HashSet::new();
let mut glossary = HashSet::new();
for item in i {
glossary.extend(item.glossary().iter().cloned());
if let MacTok::Name(n) = item.tok() {
top_glossary.insert(n.clone());
}
items.push(item);
}
Self { items: Rc::new(items), top_glossary: Rc::new(top_glossary), glossary: Rc::new(glossary) }
}
pub fn map<F: FnMut(MacTree) -> Option<MacTree>>(&self, changed: &mut bool, map: &mut F) -> Self {
Self::new(self.items.iter().map(|tree| ro(changed, |changed| tree.map(changed, map))))
}
pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary }
pub fn concat(self, other: Self) -> Self {
if self.items.is_empty() {
return other;
} else if other.items.is_empty() {
return self;
}
let items = match (Rc::try_unwrap(self.items), Rc::try_unwrap(other.items)) {
(Ok(mut left), Ok(mut right)) => {
left.append(&mut right);
left
},
(Ok(mut left), Err(right)) => {
left.extend_from_slice(&right[..]);
left
},
(Err(left), Ok(mut right)) => {
right.splice(0..0, left.iter().cloned());
right
},
(Err(left), Err(right)) => left.iter().chain(&right[..]).cloned().collect(),
};
Self {
items: Rc::new(items),
top_glossary: union_rc_sets([self.top_glossary, other.top_glossary]),
glossary: union_rc_sets([self.glossary, other.glossary]),
}
}
}
impl Format for MacTreeSeq {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
mtreev_fmt(&self.items[..], c).await
}
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -37,48 +110,21 @@ impl MacTree {
pub fn tok(&self) -> &MacTok { &self.tok } pub fn tok(&self) -> &MacTok { &self.tok }
pub fn pos(&self) -> Pos { self.pos.clone() } pub fn pos(&self) -> Pos { self.pos.clone() }
pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary } pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary }
pub async fn lower(&self, ctx: LowerCtx<'_>, args: Substack<'_, Sym>) -> GExpr { pub fn map<F: FnMut(Self) -> Option<Self>>(&self, changed: &mut bool, map: &mut F) -> Self {
let expr = match self.tok() { let tok = match map(self.clone()) {
MacTok::Bottom(e) => bot(e.clone()), Some(new_tok) => {
MacTok::Lambda(arg, body) => { *changed = true;
let MacTok::Name(name) = &*arg.tok else { return new_tok;
let err = mk_errv( },
ctx.sys.i().i("Syntax error after macros").await, None => match &*self.tok {
"This token ends up as a binding, consider replacing it with a name", MacTok::Lambda(arg, body) =>
[arg.pos()], MacTok::Lambda(ro(changed, |changed| arg.map(changed, map)), body.map(changed, map)),
); MacTok::Name(_) | MacTok::Value(_) => return self.clone(),
ctx.rep.report(err.clone()); MacTok::Slot | MacTok::Ph(_) | MacTok::Bottom(_) => return self.clone(),
return bot(err); MacTok::S(p, body) => MacTok::S(*p, body.map(changed, map)),
},
}; };
lambda(args.len() as u64, lower_v(body, ctx, args.push(name.clone())).await) if *changed { tok.at(self.pos()) } else { self.clone() }
},
MacTok::Name(name) => match args.iter().enumerate().find(|(_, n)| *n == name) {
None => sym_ref(name.clone()),
Some((i, _)) => arg((args.len() - i) as u64),
},
MacTok::Ph(ph) => {
let err = mk_errv(
ctx.sys.i().i("Placeholder in value").await,
format!("Placeholder {ph} is only supported in macro patterns"),
[self.pos()],
);
ctx.rep.report(err.clone());
return bot(err);
},
MacTok::S(Paren::Round, body) => call(lower_v(body, ctx, args).await),
MacTok::S(..) => {
let err = mk_errv(
ctx.sys.i().i("[] or {} after macros").await,
format!("{} didn't match any macro", fmt(self, ctx.sys.i()).await),
[self.pos()],
);
ctx.rep.report(err.clone());
return bot(err);
},
MacTok::Slot => panic!("Uninstantiated template should never be exposed"),
MacTok::Value(v) => v.clone().to_expr().await,
};
expr.at(self.pos())
} }
} }
impl Atomic for MacTree { impl Atomic for MacTree {
@@ -90,7 +136,8 @@ impl OwnedAtom for MacTree {
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
self.tok.print(c).await tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("'{0}")))
.units([self.tok.print(c).await])
} }
} }
impl Format for MacTree { impl Format for MacTree {
@@ -99,57 +146,49 @@ impl Format for MacTree {
} }
} }
pub async fn lower_v(v: &[MacTree], ctx: LowerCtx<'_>, args: Substack<'_, Sym>) -> Vec<GExpr> {
join_all(v.iter().map(|t| t.lower(ctx.clone(), args.clone())).collect::<Vec<_>>()).await
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum MacTok { pub enum MacTok {
S(Paren, Vec<MacTree>), S(Paren, MacTreeSeq),
Name(Sym), Name(Sym),
/// Only permitted in arguments to `instantiate_tpl` /// Only permitted in arguments to `instantiate_tpl`
Slot, Slot,
Value(Expr), Value(Expr),
Lambda(MacTree, Vec<MacTree>), Lambda(MacTree, MacTreeSeq),
/// Only permitted in "pattern" values produced by macro blocks, which are /// Only permitted in "pattern" values produced by macro blocks, which are
/// never accessed as variables by usercode /// never accessed as variables by usercode
Ph(Ph), Ph(Ph),
Bottom(OrcErrv), Bottom(OrcErrv),
} }
impl MacTok { impl MacTok {
pub fn build_glossary(&self) -> HashSet<Sym> { pub fn build_glossary(&self) -> Rc<HashSet<Sym>> {
match self { match self {
MacTok::Bottom(_) | MacTok::Ph(_) | MacTok::Slot | MacTok::Value(_) => HashSet::new(), MacTok::Bottom(_) | MacTok::Ph(_) | MacTok::Slot | MacTok::Value(_) => Rc::default(),
MacTok::Name(sym) => HashSet::from([sym.clone()]), MacTok::Name(sym) => Rc::new(HashSet::from([sym.clone()])),
MacTok::S(_, body) => body.iter().flat_map(|mt| &*mt.glossary).cloned().collect(), MacTok::S(_, body) => union_rc_sets(body.items.iter().map(|mt| mt.glossary.clone())),
MacTok::Lambda(arg, body) => MacTok::Lambda(arg, body) =>
body.iter().chain([arg]).flat_map(|mt| &*mt.glossary).cloned().collect(), union_rc_sets(body.items.iter().chain([arg]).map(|mt| mt.glossary.clone())),
} }
} }
pub fn at(self, pos: impl Into<Pos>) -> MacTree { pub fn at(self, pos: impl Into<Pos>) -> MacTree {
MacTree { pos: pos.into(), glossary: Rc::new(self.build_glossary()), tok: Rc::new(self) } MacTree { pos: pos.into(), glossary: self.build_glossary(), tok: Rc::new(self) }
} }
} }
impl Format for MacTok { impl Format for MacTok {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
match self { match self {
Self::Value(v) => v.print(c).await, Self::Value(v) => v.print(c).await,
Self::Lambda(arg, b) => FmtUnit::new( Self::Lambda(arg, b) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
tl_cache!(Rc<Variants>: Rc::new(Variants::default() .unbounded("\\{0} {1l}")
.unbounded("\\{0b}.{1l}") .bounded("(\\{0} {1b})")))
.bounded("(\\{0b}.{1b})"))), .units([arg.print(c).boxed_local().await, b.print(c).await]),
[arg.print(c).boxed_local().await, mtreev_fmt(b, c).await],
),
Self::Name(n) => format!("{n}").into(), Self::Name(n) => format!("{n}").into(),
Self::Ph(ph) => format!("{ph}").into(), Self::Ph(ph) => format!("{ph}").into(),
Self::S(p, body) => FmtUnit::new( Self::S(p, body) => match *p {
match *p { Paren::Round => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("({0b})"))),
Paren::Round => Rc::new(Variants::default().bounded("({0b})")), Paren::Curly => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{{0b}}"))),
Paren::Curly => Rc::new(Variants::default().bounded("{{0b}}")), Paren::Square => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("[{0b}]"))),
Paren::Square => Rc::new(Variants::default().bounded("[{0b}]")), }
}, .units([body.print(c).await]),
[mtreev_fmt(body, c).await],
),
Self::Slot => "$SLOT".into(), Self::Slot => "$SLOT".into(),
Self::Bottom(err) if err.len() == 1 => format!("Bottom({}) ", err.one().unwrap()).into(), Self::Bottom(err) if err.len() == 1 => format!("Bottom({}) ", err.one().unwrap()).into(),
Self::Bottom(err) => format!("Botttom(\n{}) ", indent(&err.to_string())).into(), Self::Bottom(err) => format!("Botttom(\n{}) ", indent(&err.to_string())).into(),
@@ -161,7 +200,7 @@ pub async fn mtreev_fmt<'b>(
v: impl IntoIterator<Item = &'b MacTree>, v: impl IntoIterator<Item = &'b MacTree>,
c: &(impl FmtCtx + ?Sized), c: &(impl FmtCtx + ?Sized),
) -> FmtUnit { ) -> FmtUnit {
FmtUnit::sequence(" ", None, join_all(v.into_iter().map(|t| t.print(c))).await) FmtUnit::sequence("", " ", "", None, join_all(v.into_iter().map(|t| t.print(c))).await)
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
@@ -181,42 +220,12 @@ impl Display for Ph {
} }
} }
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Coding)]
pub enum PhKind { pub enum PhKind {
Scalar, Scalar,
Vector { at_least_one: bool, priority: u8 }, Vector { at_least_one: bool, priority: u8 },
} }
pub fn map_mactree<F: FnMut(MacTree) -> Option<MacTree>>(
src: &MacTree,
changed: &mut bool,
map: &mut F,
) -> MacTree {
let tok = match map(src.clone()) {
Some(new_tok) => {
*changed = true;
return new_tok;
},
None => match &*src.tok {
MacTok::Lambda(arg, body) => MacTok::Lambda(
ro(changed, |changed| map_mactree(arg, changed, map)),
map_mactree_v(body, changed, map),
),
MacTok::Name(_) | MacTok::Value(_) => return src.clone(),
MacTok::Slot | MacTok::Ph(_) | MacTok::Bottom(_) => return src.clone(),
MacTok::S(p, body) => MacTok::S(*p, map_mactree_v(body, changed, map)),
},
};
if *changed { tok.at(src.pos()) } else { src.clone() }
}
pub fn map_mactree_v<F: FnMut(MacTree) -> Option<MacTree>>(
src: &[MacTree],
changed: &mut bool,
map: &mut F,
) -> Vec<MacTree> {
src.iter().map(|tree| ro(changed, |changed| map_mactree(tree, changed, map))).collect_vec()
}
/// reverse "or". Inside, the flag is always false, but raising it will raise /// reverse "or". Inside, the flag is always false, but raising it will raise
/// the outside flag too. /// the outside flag too.
fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T { fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
@@ -225,7 +234,3 @@ fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
*flag |= new_flag; *flag |= new_flag;
val val
} }
pub fn glossary_v(src: &[MacTree]) -> impl Iterator<Item = Sym> {
src.iter().flat_map(|mt| mt.glossary()).cloned()
}
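Editor's note: `MacTree::map` and `MacTreeSeq::map` replace the old free functions but keep the copy-on-change discipline: child traversals get a fresh flag through `ro`, and untouched subtrees are returned as cheap `Rc` clones rather than rebuilt. The core of that traversal, with the fresh-flag trick inlined over a toy `Rc` tree (illustrative names only):

```rust
use std::rc::Rc;

#[derive(Debug)]
enum Tree {
    Leaf(i32),
    Node(Vec<Rc<Tree>>),
}

/// Rebuild only the spine above changed leaves; unchanged subtrees keep their Rc.
fn map(tree: &Rc<Tree>, changed: &mut bool, f: &mut dyn FnMut(i32) -> Option<i32>) -> Rc<Tree> {
    match &**tree {
        Tree::Leaf(n) => match f(*n) {
            Some(m) => {
                *changed = true;
                Rc::new(Tree::Leaf(m))
            },
            None => tree.clone(),
        },
        Tree::Node(items) => {
            // The `ro` helper in the diff: give children a fresh flag, then OR it back in.
            let mut any = false;
            let mut new_items = Vec::with_capacity(items.len());
            for item in items {
                new_items.push(map(item, &mut any, f));
            }
            *changed |= any;
            if any { Rc::new(Tree::Node(new_items)) } else { tree.clone() }
        },
    }
}

fn main() {
    let shared = Rc::new(Tree::Node(vec![Rc::new(Tree::Leaf(1)), Rc::new(Tree::Leaf(2))]));
    let root = Rc::new(Tree::Node(vec![shared.clone(), Rc::new(Tree::Leaf(3))]));
    let mut changed = false;
    // Only the leaf 3 changes, so the `shared` subtree is reused as-is.
    let new_root = map(&root, &mut changed, &mut |n| (n == 3).then_some(30));
    assert!(changed);
    let Tree::Node(children) = &*new_root else { panic!() };
    assert!(Rc::ptr_eq(&children[0], &shared));
}
```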

View File

@@ -1,46 +1,44 @@
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use futures::FutureExt; use futures::FutureExt;
use itertools::chain;
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::parse::ParseCtx; use orchid_base::parse::ParseCtx;
use orchid_base::sym;
use orchid_base::tokens::PARENS; use orchid_base::tokens::PARENS;
use orchid_base::tree::Paren; use orchid_base::tree::Paren;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable}; use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::parser::p_tree2gen; use orchid_extension::parser::p_tree2gen;
use orchid_extension::tree::{GenTok, GenTokTree, ref_tok, x_tok}; use orchid_extension::tree::{GenTok, GenTokTree, x_tok};
use crate::macros::instantiate_tpl::InstantiateTplCall;
use crate::macros::let_line::parse_tok; use crate::macros::let_line::parse_tok;
use crate::macros::mactree::{MacTok, MacTree}; use crate::macros::mactree::{MacTok, MacTree, MacTreeSeq};
#[derive(Default)] #[derive(Default)]
pub struct MacTreeLexer; pub struct MacTreeLexer;
impl Lexer for MacTreeLexer { impl Lexer for MacTreeLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\'']; const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\''];
async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> { async fn lex<'a>(tail: &'a str, lctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let Some(tail2) = tail.strip_prefix('\'') else { let Some(tail2) = tail.strip_prefix('\'') else {
return Err(err_not_applicable(ctx.i()).await); return Err(err_not_applicable().await);
}; };
let tail3 = tail2.trim_start(); let tail3 = tail2.trim_start();
let mut args = Vec::new(); let mut args = Vec::new();
return match mac_tree(tail3, &mut args, ctx).await { return match mac_tree(tail3, &mut args, lctx).await {
Ok((tail4, mactree)) => { Ok((tail4, mactree)) => {
let range = ctx.pos_tt(tail, tail4); let range = lctx.pos_tt(tail, tail4);
let tok = match &args[..] { let tok = match &args[..] {
[] => x_tok(mactree).await, [] => x_tok(mactree).await,
_ => { _ => {
let call = ([ let instantiate_tpl_call =
ref_tok(sym!(macros::instantiate_tpl; ctx.i()).await).await.at(range.clone()), InstantiateTplCall { argc: args.len(), argv: vec![], tpl: mactree };
x_tok(mactree).await.at(range.clone()), let call = chain!([x_tok(instantiate_tpl_call).await.at(range.clone())], args);
]
.into_iter())
.chain(args.into_iter());
GenTok::S(Paren::Round, call.collect()) GenTok::S(Paren::Round, call.collect())
}, },
}; };
Ok((tail4, tok.at(range))) Ok((tail4, tok.at(range)))
}, },
Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.pos_lt(1, tail2)))), Err(e) => Ok((tail2, GenTok::Bottom(e).at(lctx.pos_lt(1, tail2)))),
}; };
async fn mac_tree<'a>( async fn mac_tree<'a>(
tail: &'a str, tail: &'a str,
@@ -53,7 +51,8 @@ impl Lexer for MacTreeLexer {
return loop { return loop {
let tail2 = body_tail.trim_start(); let tail2 = body_tail.trim_start();
if let Some(tail3) = tail2.strip_prefix(*rp) { if let Some(tail3) = tail2.strip_prefix(*rp) {
break Ok((tail3, MacTok::S(*paren, items).at(ctx.pos_tt(tail, tail3).pos()))); let tok = MacTok::S(*paren, MacTreeSeq::new(items));
break Ok((tail3, tok.at(ctx.pos_tt(tail, tail3).pos())));
} else if tail2.is_empty() { } else if tail2.is_empty() {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("Unclosed block").await, ctx.i().i("Unclosed block").await,
@@ -85,7 +84,7 @@ impl Lexer for MacTreeLexer {
body.push(body_tok); body.push(body_tok);
tail3 = tail5; tail3 = tail5;
} }
Ok((tail3, MacTok::Lambda(param, body).at(ctx.pos_tt(tail, tail3).pos()))) Ok((tail3, MacTok::Lambda(param, MacTreeSeq::new(body)).at(ctx.pos_tt(tail, tail3).pos())))
} else { } else {
let (tail2, sub) = ctx.recurse(tail).await?; let (tail2, sub) = ctx.recurse(tail).await?;
let parsed = parse_tok(&sub, ctx).await.expect("Unexpected invalid token"); let parsed = parse_tok(&sub, ctx).await.expect("Unexpected invalid token");
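Editor's note: the lexer threads the unconsumed input through as a `&str` tail: `strip_prefix` eats the quote marker, `trim_start` skips whitespace, and every helper hands back the new tail together with the parsed value. A freestanding sketch of that tail-passing style for a tiny quote-then-group grammar (not the crate's lexer API):

```rust
#[derive(Debug, PartialEq)]
enum QTok {
    Name(String),
    Group(Vec<QTok>),
}

/// Lex `'name` or `'( ... )`, returning the unconsumed tail and the token.
fn lex_quoted(input: &str) -> Option<(&str, QTok)> {
    let tail = input.strip_prefix('\'')?;
    lex_tok(tail.trim_start())
}

fn lex_tok(tail: &str) -> Option<(&str, QTok)> {
    if let Some(mut body_tail) = tail.strip_prefix('(') {
        let mut items = Vec::new();
        loop {
            body_tail = body_tail.trim_start();
            if let Some(rest) = body_tail.strip_prefix(')') {
                return Some((rest, QTok::Group(items)));
            }
            if body_tail.is_empty() {
                return None; // unclosed block
            }
            let (rest, item) = lex_tok(body_tail)?;
            items.push(item);
            body_tail = rest;
        }
    }
    // A bare name: read while alphanumeric.
    let end = tail.find(|c: char| !c.is_alphanumeric()).unwrap_or(tail.len());
    if end == 0 {
        return None;
    }
    Some((&tail[end..], QTok::Name(tail[..end].to_string())))
}

fn main() {
    let (rest, tok) = lex_quoted("'(add one two) rest").unwrap();
    assert_eq!(rest, " rest");
    assert_eq!(
        tok,
        QTok::Group(vec![
            QTok::Name("add".into()),
            QTok::Name("one".into()),
            QTok::Name("two".into()),
        ])
    );
}
```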

View File

@@ -0,0 +1,178 @@
use std::borrow::Cow;
use async_fn_stream::stream;
use futures::future::join_all;
use futures::{Stream, StreamExt, stream};
use never::Never;
use orchid_api::ExprTicket;
use orchid_api_derive::Coding;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::fmt;
use orchid_base::name::Sym;
use orchid_base::sym;
use orchid_extension::atom::{Atomic, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::{ExecHandle, exec};
use orchid_extension::expr::{Expr, ExprHandle};
use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref};
use orchid_extension::tree::{GenMember, fun, prefix};
use crate::macros::resolve::resolve;
use crate::macros::utils::{build_macro, mactree, mactreev};
use crate::std::reflection::sym_atom::SymAtom;
use crate::std::tuple::Tuple;
use crate::{HomoTpl, MacTok, MacTree, OrcOpt, Tpl, UntypedTuple, api};
#[derive(Clone, Coding)]
pub struct MatcherData {
keys: Vec<api::TStrv>,
matcher: ExprTicket,
}
impl MatcherData {
async fn matcher(&self) -> Expr { Expr::from_handle(ExprHandle::from_ticket(self.matcher).await) }
pub async fn run_matcher(
&self,
h: &mut ExecHandle<'_>,
val: impl ToExpr,
) -> OrcRes<OrcOpt<HomoTpl<Expr>>> {
h.exec::<OrcOpt<HomoTpl<Expr>>>(call(self.matcher().await.to_gen().await, [val.to_gen().await]))
.await
}
pub fn keys(&self) -> impl Stream<Item = Sym> {
stream(async |mut h| {
for tk in &self.keys {
h.emit(Sym::from_api(*tk, &i()).await).await
}
})
}
}
#[derive(Clone)]
pub struct MatcherAtom {
/// The names that subresults may be bound to
pub(super) keys: Vec<Sym>,
/// Takes the value-to-be-matched, returns an `option (tuple T1..TN)` of the
/// subresults to be bound to the names returned by [Self::keys]
pub(super) matcher: Expr,
}
impl Atomic for MatcherAtom {
type Data = MatcherData;
type Variant = OwnedVariant;
}
impl OwnedAtom for MatcherAtom {
type Refs = Never;
async fn val(&self) -> std::borrow::Cow<'_, Self::Data> {
Cow::Owned(MatcherData {
keys: self.keys.iter().map(|t| t.to_api()).collect(),
matcher: self.matcher.handle().ticket(),
})
}
}
pub async fn gen_match_macro_lib() -> Vec<GenMember> {
prefix("pattern", [
fun(
true,
"match_one",
async |mat: TAtom<MatcherAtom>, value: Expr, then: Expr, default: Expr| {
exec(async move |mut h| match mat.run_matcher(&mut h, value).await? {
OrcOpt(Some(values)) =>
Ok(call(then.to_gen().await, join_all(values.0.into_iter().map(|x| x.to_gen())).await)),
OrcOpt(None) => Ok(default.to_gen().await),
})
.await
},
),
fun(true, "matcher", async |names: HomoTpl<TAtom<SymAtom>>, matcher: Expr| MatcherAtom {
keys: join_all(names.0.iter().map(async |atm| Sym::from_api(atm.0, &i()).await)).await,
matcher,
}),
build_macro(None, ["match", "match_rule", "_row", "=>"])
.rule(mactreev!("pattern::match" { "..$" rules 0 }), [async |[rules]| {
exec(async move |mut h| {
let rule_lines = h
.exec::<TAtom<Tuple>>(call(sym_ref(sym!(macros::resolve; i())), [
mactree!(macros::common::semi_list "push" rules.clone();).to_gen().await,
]))
.await?;
let mut rule_atoms = Vec::<(TAtom<MatcherAtom>, Expr)>::new();
for line_exprh in rule_lines.iter() {
let line_mac = h
.exec::<TAtom<MacTree>>(Expr::from_handle(ExprHandle::from_ticket(*line_exprh).await))
.await?;
let Tpl((matcher, body)) = h
.exec(call(sym_ref(sym!(macros::resolve; i())), [
mactree!(pattern::_row "push" own(&line_mac).await ;).to_gen().await,
]))
.await?;
rule_atoms.push((matcher, body));
}
let base_case = lambda(0, [bot(mk_errv(
i().i("No branches match").await,
"None of the pattern provided matches this value",
[rules.pos()],
))]);
let match_expr = stream::iter(rule_atoms.into_iter().rev())
.fold(base_case, async |tail, (mat, body)| {
lambda(0, [call(sym_ref(sym!(pattern::match_one; i())), [
mat.to_gen().await,
arg(0),
body.to_gen().await,
call(tail, [arg(0)]),
])])
})
.await;
Ok(match_expr)
})
.await
}])
.rule(mactreev!(pattern::match_rule (( "...$" pattern 0 ))), [async |[pattern]| {
resolve(mactree!(pattern::match_rule "push" pattern; )).await
}])
.rule(mactreev!(pattern::_row ( "...$" pattern 0 pattern::=> "...$" value 1 )), [
async |[pattern, mut value]| {
exec(async move |mut h| -> OrcRes<Tpl<(TAtom<MatcherAtom>, GExpr)>> {
let Ok(pat) = h
.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
mactree!(pattern::match_rule "push" pattern.clone();).to_gen().await,
]))
.await
else {
return Err(mk_errv(
i().i("Invalid pattern").await,
format!("Could not parse {} as a match pattern", fmt(&pattern, &i()).await),
[pattern.pos()],
));
};
value = (pat.keys())
.fold(value, async |value, name| mactree!("l_" name; ( "push" value ; )))
.await;
Ok(Tpl((pat, resolve(value).await)))
})
.await
},
])
.finish(),
fun(true, "ref_body", async |val| OrcOpt(Some(UntypedTuple(vec![val])))),
build_macro(None, ["ref"])
.rule(mactreev!(pattern::match_rule(pattern::ref "$" name)), [async |[name]| {
let MacTok::Name(name) = name.tok() else {
return Err(mk_errv(
i().i("pattern 'ref' requires a name to bind to").await,
format!(
"'ref' was interpreted as a binding matcher, \
but it was followed by {} instead of a name",
fmt(&name, &i()).await
),
[name.pos()],
));
};
Ok(MatcherAtom {
keys: vec![name.clone()],
matcher: sym_ref(sym!(pattern::ref_body; i())).to_expr().await,
})
}])
.finish(),
])
}
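Editor's note: the `pattern::match` rule compiles its branches by folding the rule list from the back: the base case is a thunk that reports "No branches match", and each rule wraps the accumulated fallback in a `pattern::match_one(matcher, value, body, fallback)` call applied to the same argument. The same reverse fold, with plain Rust closures standing in for generated expressions (the rule contents are invented for the example):

```rust
/// A "rule": match values divisible by `divisor`, producing `label`.
struct Rule {
    divisor: i32,
    label: &'static str,
}

/// Fold the rules from the back into one chained matcher, with a
/// "No branches match" base case, mirroring the shape of the generated code.
fn compile_match(rules: Vec<Rule>) -> Box<dyn Fn(i32) -> String> {
    let base: Box<dyn Fn(i32) -> String> = Box::new(|_| "No branches match".to_string());
    rules.into_iter().rev().fold(base, |fallback, rule| {
        Box::new(move |value| {
            if value % rule.divisor == 0 {
                format!("{} ({} / {} = {})", rule.label, value, rule.divisor, value / rule.divisor)
            } else {
                fallback(value) // fall through to the next branch
            }
        })
    })
}

fn main() {
    let matcher = compile_match(vec![
        Rule { divisor: 15, label: "fizzbuzz" },
        Rule { divisor: 3, label: "fizz" },
        Rule { divisor: 5, label: "buzz" },
    ]);
    assert_eq!(matcher(9), "fizz (9 / 3 = 3)");
    assert_eq!(matcher(10), "buzz (10 / 5 = 2)");
    assert_eq!(compile_match(vec![])(7), "No branches match");
}
```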

View File

@@ -3,10 +3,14 @@ mod let_line;
mod macro_lib; mod macro_lib;
mod macro_line; mod macro_line;
pub mod macro_system; pub mod macro_system;
mod macro_value;
pub mod mactree; pub mod mactree;
mod mactree_lexer; mod mactree_lexer;
pub mod recur_state; pub mod match_macros;
mod ph_lexer;
mod resolve; mod resolve;
mod rule; mod rule;
pub mod std_macros;
mod utils;
use mactree::{MacTok, MacTree}; use mactree::{MacTok, MacTree};

View File

@@ -0,0 +1,77 @@
use orchid_api_derive::Coding;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::FmtUnit;
use orchid_base::parse::{name_char, name_start};
use orchid_extension::atom::Atomic;
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::context::i;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::tree::{GenTokTree, x_tok};
use crate::macros::mactree::{Ph, PhKind};
#[derive(Clone, Coding)]
pub struct PhAtom(orchid_api::TStr, PhKind);
impl PhAtom {
pub async fn to_full(&self) -> Ph { Ph { kind: self.1, name: i().ex(self.0).await } }
}
impl Atomic for PhAtom {
type Data = Self;
type Variant = ThinVariant;
}
impl ThinAtom for PhAtom {
async fn print(&self) -> FmtUnit {
Ph { name: i().ex(self.0).await, kind: self.1 }.to_string().into()
}
}
#[derive(Default)]
pub struct PhLexer;
impl Lexer for PhLexer {
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['$'..='$', '.'..='.'];
async fn lex<'a>(line: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let (tail, name, phkind) = if let Some(tail) = line.strip_prefix("$")
&& tail.starts_with(name_start)
{
let name = tail.split_once(|c| !name_char(c)).map_or("", |(h, _)| h);
let tail = tail.split_at(name.len()).1;
(tail, name, PhKind::Scalar)
} else {
async fn name_and_prio<'a>(
tail: &'a str,
ctx: &'a LexContext<'a>,
) -> OrcRes<(&'a str, u8, &'a str)> {
let name = tail.split_once(|c| !name_char(c)).map_or("", |(h, _)| h);
let tail = tail.split_at(name.len()).1;
let (prio, tail) = match tail.strip_prefix(":") {
None => (0, tail),
Some(tail) => {
let prio = tail.split_once(|c: char| c.is_ascii_digit()).map_or("", |(h, _)| h);
let tail = tail.split_at(prio.len()).1;
if let Ok(prio_num) = prio.parse::<u8>() {
(prio_num, tail)
} else {
return Err(mk_errv(
i().i("Invalid priority, must be 0-255").await,
format!("{prio} is not a valid placeholder priority"),
[ctx.pos_lt(prio.len(), tail)],
));
}
},
};
Ok((name, prio, tail))
}
if let Some(tail) = line.strip_prefix("..$") {
let (name, priority, tail) = name_and_prio(tail, ctx).await?;
(tail, name, PhKind::Vector { at_least_one: false, priority })
} else if let Some(tail) = line.strip_prefix("...$") {
let (name, priority, tail) = name_and_prio(tail, ctx).await?;
(tail, name, PhKind::Vector { at_least_one: true, priority })
} else {
return Err(err_not_applicable().await);
}
};
let ph_atom = PhAtom(i().i::<String>(name).await.to_api(), phkind);
Ok((tail, x_tok(ph_atom).await.at(ctx.pos_tt(line, tail))))
}
}
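Editor's note: `PhLexer` recognises three placeholder spellings: `$name` for scalars, `..$name[:prio]` for possibly-empty vectorials, and `...$name[:prio]` for vectorials that must match at least one token, with the priority defaulting to 0. A standalone approximation of that classification (simplified relative to the lexer above, which also validates name characters and reports priority errors):

```rust
#[derive(Debug, PartialEq)]
enum PhKind {
    Scalar,
    Vector { at_least_one: bool, priority: u8 },
}

/// Classify a placeholder, returning its name and kind.
fn parse_ph(src: &str) -> Option<(&str, PhKind)> {
    fn name_and_prio(tail: &str) -> Option<(&str, u8)> {
        let end = tail.find(|c: char| !c.is_alphanumeric() && c != '_').unwrap_or(tail.len());
        let (name, rest) = tail.split_at(end);
        if name.is_empty() {
            return None;
        }
        let prio = match rest.strip_prefix(':') {
            None => 0,
            Some(digits) => digits.parse().ok()?,
        };
        Some((name, prio))
    }
    if let Some(tail) = src.strip_prefix("...$") {
        let (name, priority) = name_and_prio(tail)?;
        Some((name, PhKind::Vector { at_least_one: true, priority }))
    } else if let Some(tail) = src.strip_prefix("..$") {
        let (name, priority) = name_and_prio(tail)?;
        Some((name, PhKind::Vector { at_least_one: false, priority }))
    } else if let Some(tail) = src.strip_prefix('$') {
        let (name, _) = name_and_prio(tail)?;
        Some((name, PhKind::Scalar))
    } else {
        None
    }
}

fn main() {
    assert_eq!(parse_ph("$x"), Some(("x", PhKind::Scalar)));
    assert_eq!(
        parse_ph("..$rest:2"),
        Some(("rest", PhKind::Vector { at_least_one: false, priority: 2 }))
    );
    assert_eq!(
        parse_ph("...$body"),
        Some(("body", PhKind::Vector { at_least_one: true, priority: 0 }))
    );
}
```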

View File

@@ -1,71 +0,0 @@
use std::borrow::Cow;
use std::fmt;
use std::rc::Rc;
use never::Never;
use orchid_base::format::{FmtCtx, FmtUnit};
use orchid_base::interner::Tok;
use orchid_base::name::Sym;
use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct RulePath {
pub module: Sym,
pub main_kw: Tok<String>,
pub rule: u32,
}
impl RulePath {
pub fn name(&self) -> String { format!("rule::{}::{}", self.main_kw, self.rule) }
}
impl fmt::Display for RulePath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Rule {}::({})::{}", self.module, self.main_kw, self.rule)
}
}
#[derive(Clone)]
pub enum RecurState {
Bottom,
Recursive { path: RulePath, prev: Rc<RecurState> },
}
impl RecurState {
pub fn base(path: RulePath) -> Self {
RecurState::Recursive { path, prev: Rc::new(RecurState::Bottom) }
}
pub fn push(&self, new: RulePath) -> Option<Self> {
let mut cur = self;
while let Self::Recursive { path, prev } = cur {
if &new == path {
return None;
}
cur = prev;
}
Some(Self::Recursive { path: new, prev: Rc::new(self.clone()) })
}
}
impl Atomic for RecurState {
type Data = Option<()>;
type Variant = OwnedVariant;
}
impl OwnedAtom for RecurState {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> {
Cow::Owned(match self {
Self::Bottom => None,
Self::Recursive { .. } => Some(()),
})
}
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
self.to_string().into()
}
}
impl fmt::Display for RecurState {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Bottom => write!(f, "RecurState::Bottom"),
Self::Recursive { path, prev } => write!(f, "{path}\n{prev}"),
}
}
}

View File

@@ -1,110 +1,275 @@
use futures::FutureExt; use std::ops::{Add, Range};
use hashbrown::HashMap;
use async_fn_stream::stream;
use futures::{FutureExt, StreamExt};
use hashbrown::{HashMap, HashSet};
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::mk_errv; use orchid_base::error::mk_errv;
use orchid_base::format::fmt;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::sym;
use orchid_base::tree::Paren; use orchid_base::tree::Paren;
use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own;
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr; use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::ExecHandle; use orchid_extension::coroutine_exec::{ExecHandle, exec};
use orchid_extension::gen_expr::{GExpr, bot, call, sym_ref}; use orchid_extension::gen_expr::{GExpr, bot, call, lambda, sym_ref};
use orchid_extension::system::SysCtx; use orchid_extension::reflection::{ReflMemKind, refl};
use subslice_offset::SubsliceOffset;
use substack::Substack;
use crate::macros::macro_line::{Macro, Rule}; use crate::macros::macro_value::{Macro, Rule};
use crate::macros::recur_state::{RecurState, RulePath}; use crate::macros::mactree::MacTreeSeq;
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
use crate::macros::rule::state::{MatchState, StateEntry}; use crate::macros::rule::state::{MatchState, StateEntry};
use crate::{MacTok, MacTree}; use crate::{MacTok, MacTree};
pub struct ResolveCtx<'a> { pub async fn resolve(tpl: MacTree) -> GExpr {
pub ctx: SysCtx, exec(async move |mut h| {
pub recur: RecurState, let root = refl();
pub h: ExecHandle<'a>, let mut macros = HashMap::new();
pub named: HashMap<Sym, Vec<(&'a NamedMatcher, &'a Macro, &'a Rule)>>, for n in tpl.glossary() {
pub priod: Vec<(&'a PriodMatcher, &'a Macro, &'a Rule)>, if let Ok(ReflMemKind::Const) = root.get_by_path(n).await.map(|m| m.kind()) {
let Ok(mac) = h.exec::<TAtom<Macro>>(sym_ref(n.clone())).await else { continue };
let mac = own(&mac).await;
macros.entry(mac.canonical_name().await).or_insert(mac);
}
}
let mut exclusive = Vec::new();
let mut prios = Vec::<u64>::new();
let mut priod = Vec::<FilteredMacroRecord>::new();
for (_, mac) in macros.iter() {
let mut record = FilteredMacroRecord { mac, rules: Vec::new() };
for (rule_i, rule) in mac.0.rules.iter().enumerate() {
if rule.pattern.glossary.is_subset(tpl.glossary()) {
record.rules.push(rule_i);
}
}
if !record.rules.is_empty() {
match mac.0.prio {
None => exclusive.push(record),
Some(prio) => {
let i = prios.partition_point(|p| *p > prio);
prios.insert(i, prio);
priod.insert(i, record);
},
}
}
}
let mut rctx = ResolveCtx { h, exclusive, priod };
resolve_one(&mut rctx, Substack::Bottom, &tpl).await
})
.await
} }
pub async fn resolve(ctx: &mut ResolveCtx<'_>, value: &MacTree) -> Option<MacTree> { /// Rules belonging to one macro that passed a particular filter
pub struct FilteredMacroRecord<'a> {
mac: &'a Macro,
/// The rules in increasing order of index
rules: Vec<usize>,
}
struct ResolveCtx<'a> {
pub h: ExecHandle<'a>,
/// If these overlap, that's a compile-time error
pub exclusive: Vec<FilteredMacroRecord<'a>>,
/// If these overlap, the priorities decide the order. In case of a tie, the
/// order is unspecified
pub priod: Vec<FilteredMacroRecord<'a>>,
}
async fn resolve_one(
ctx: &mut ResolveCtx<'_>,
arg_stk: Substack<'_, Sym>,
value: &MacTree,
) -> GExpr {
match value.tok() { match value.tok() {
MacTok::Ph(_) | MacTok::Slot => panic!("Forbidden element in value mactree"), MacTok::Ph(_) | MacTok::Slot => panic!("Forbidden element in value mactree"),
MacTok::Bottom(_) | MacTok::Value(_) | MacTok::Name(_) => None, MacTok::Bottom(err) => bot(err.clone()),
MacTok::Lambda(arg, body) => MacTok::Value(v) => v.clone().to_gen().await,
Some(MacTok::Lambda(arg.clone(), resolve_seq(ctx, body).await?).at(value.pos())), MacTok::Name(n) => sym_ref(n.clone()),
MacTok::S(ptyp, body) => Some(MacTok::S(*ptyp, resolve_seq(ctx, body).await?).at(value.pos())), MacTok::Lambda(arg, body) => {
} let MacTok::Name(name) = &*arg.tok else {
}
pub async fn resolve_seq(ctx: &mut ResolveCtx<'_>, val: &[MacTree]) -> Option<Vec<MacTree>> {
let mut any_changed = false;
let mut i = 0;
let mut val = val.to_vec();
'all_named: while i < val.len() {
'one_named: {
let MacTok::Name(key) = val[i].tok() else { break 'one_named };
let Some(options) = ctx.named.get(key) else { break 'one_named };
let matches = (options.iter())
.filter_map(|r| Some((r.1, r.2, r.0.apply(&val[i..], |_| false)?)))
.collect_vec();
match matches.len() {
0 => break 'one_named,
1 => {
any_changed = true;
let (mac, rule, (state, tail)) = matches.into_iter().exactly_one().unwrap();
let end = val.len() - tail.len();
let body_call = mk_body_call(mac, rule, &state, &ctx.ctx, ctx.recur.clone()).await;
std::mem::drop(state);
val.splice(i..end, [MacTok::Value(ctx.h.register(body_call).await).at(Pos::None)]);
i = end;
},
2.. => todo!("Named macros conflict!"),
}
continue 'all_named;
}
i += 1;
}
for (matcher, mac, rule) in &ctx.priod {
let Some(state) = matcher.apply(&val, |_| false) else { continue };
return Some(vec![
MacTok::Value(
ctx.h.register(mk_body_call(mac, rule, &state, &ctx.ctx, ctx.recur.clone()).await).await,
)
.at(Pos::None),
]);
}
for expr in val.iter_mut() {
if let Some(new) = resolve(ctx, expr).boxed_local().await {
*expr = new;
any_changed = true;
}
}
if any_changed { Some(val) } else { None }
}
async fn mk_body_call(
mac: &Macro,
rule: &Rule,
state: &MatchState<'_>,
ctx: &SysCtx,
recur: RecurState,
) -> GExpr {
let rule_path =
RulePath { module: mac.0.module.clone(), main_kw: mac.0.own_kws[0].clone(), rule: rule.index };
let Some(new_recur) = recur.push(rule_path.clone()) else {
return bot(mk_errv( return bot(mk_errv(
ctx.i().i("Circular macro dependency").await, i().i("Syntax error after macros").await,
format!("The definition of {rule_path} is circular"), "This token ends up as a binding, consider replacing it with a name",
[rule.pos.clone()], [arg.pos()],
)); ));
}; };
let mut call_args = vec![sym_ref(mac.0.module.suffix([rule.body_name.clone()], ctx.i()).await)]; let arg_pos = arg_stk.len() as u64;
let arg_stk = arg_stk.push(name.clone());
lambda(arg_pos, [resolve_seq(ctx, arg_stk, body.clone(), value.pos()).await])
},
MacTok::S(Paren::Round, body) => resolve_seq(ctx, arg_stk, body.clone(), value.pos()).await,
MacTok::S(..) => bot(mk_errv(
i().i("Leftover [] or {} not matched by macro").await,
format!("{} was not matched by any macro", fmt(value, &i()).await),
[value.pos()],
)),
}
}
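// Sketch of the lambda numbering above: `arg_stk` only records nesting depth, so the
// outermost MacTok::Lambda is generated with arg_pos 0, a lambda nested directly inside it
// with 1, and so on; `lambda(arg_pos, ..)` is assumed to bind that index within the body.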
type XMatches<'a> = Vec<(Range<usize>, &'a Macro, &'a Rule, MatchState<'a>)>;
/// find the subsection of the slice that satisfies both the lower and upper
/// limit.
fn subsection<T>(
slice: &[T],
lower_limit: impl FnMut(&T) -> bool,
mut upper_limit: impl FnMut(&T) -> bool,
) -> Range<usize> {
let start = slice.partition_point(lower_limit);
let len = slice[start..].partition_point(|t| !upper_limit(t));
start..start + len
}
async fn resolve_seq(
ctx: &mut ResolveCtx<'_>,
arg_stk: Substack<'_, Sym>,
val: MacTreeSeq,
fallback_pos: Pos,
) -> GExpr {
if val.items.is_empty() {
return bot(mk_errv(
i().i("Empty sequence").await,
"() or (\\arg ) left after macro execution. \
This is usually caused by an incomplete call to a macro with bad error detection",
[fallback_pos],
));
}
// A sorted collection of overlapping but non-nested matches to exclusive
// macros
let mut x_matches: XMatches = Vec::new();
let top_glossary = val.top_glossary.clone();
let mut new_val = val.items.to_vec();
'x_macros: for x in &ctx.exclusive {
let mut rules_iter = x.rules.iter();
let ((before, state, after), rule) = 'rules: loop {
let Some(ridx) = rules_iter.next() else { continue 'x_macros };
let rule = &x.mac.0.rules[*ridx];
if rule.pattern.top_glossary.is_subset(&top_glossary)
&& let Some(record) = rule.matcher.apply(&val.items[..], &|_| true).await
{
break 'rules (record, rule);
};
};
let new_r = (before.len()..new_val.len() - after.len(), x.mac, rule, state);
// elements that overlap with us
let overlap =
subsection(&x_matches[..], |r| new_r.0.start < r.0.end, |r| r.0.start < new_r.0.end);
let overlapping = &x_matches[overlap.clone()];
// elements that fully contain us
let geq_range =
subsection(overlapping, |r| r.0.start <= new_r.0.start, |r| new_r.0.end <= r.0.end);
let geq = &overlapping[geq_range.clone()];
// if any of these is equal to us, all of them must be, otherwise the larger
// ranges would have overridden the smaller ones
if let Some(example) = geq.first() {
// if they are equal to us, record the conflict.
if example.0 == new_r.0 {
let idx = (x_matches.subslice_offset(geq))
.expect("this slice is statically derived from x_matches");
x_matches.insert(idx, new_r);
}
// either way, we matched so no further rules can run.
continue 'x_macros;
}
// elements we fully contain. Equal ranges have been handled above
let lt_range =
subsection(overlapping, |r| new_r.0.start <= r.0.start, |r| r.0.end <= new_r.0.end);
let lt = &overlapping[lt_range.clone()];
if lt.is_empty() {
// an empty range
let i = x_matches.partition_point(|r| r.0.start < new_r.0.start);
x_matches.insert(i, new_r);
} else {
let lt_start =
x_matches.subslice_offset(overlapping).expect("Slice statically derived from x_matches");
x_matches.splice(lt_start..lt_start + lt_range.len(), [new_r]);
}
}
// apply exclusive matches
if !x_matches.is_empty() {
// ranges of indices into x_matches which setwise conflict with each other.
// Pairwise conflict reporting is excess noise, but a single conflict error
// doesn't reveal where within the parenthesized block to look, so it's easiest
// to group them setwise even if these sets may associate macros which don't
// directly conflict.
let conflict_sets = (0..x_matches.len()).map(|x| x..x + 1).coalesce(|lran, rran| {
// each index was mapped to a range that contains only itself. Now we check if
// the last match in the first range overlaps the first match in the second
// range, and combine them if this is the case.
      if x_matches[rran.start].0.start < x_matches[lran.end - 1].0.end {
Ok(lran.start..rran.end)
} else {
Err((lran, rran))
}
});
let mac_conflict_tk = i().i("Macro conflict").await;
let error = conflict_sets
.filter(|r| 1 < r.len())
.map(|set| {
mk_errv(
mac_conflict_tk.clone(),
"Multiple partially overlapping syntax elements detected. \n\
Try parenthesizing whichever side is supposed to be the subexpression.",
x_matches[set].iter().flat_map(|rec| rec.3.names()).flat_map(|name| name.1).cloned(),
)
})
.reduce(|l, r| l + r);
if let Some(error) = error {
return bot(error);
}
// no conflicts, apply all exclusive matches
for (range, mac, rule, state) in x_matches.into_iter().rev() {
// backwards so that the non-overlapping ranges remain valid
let pos = (state.names().flat_map(|r| r.1).cloned().reduce(Pos::add))
.expect("All macro rules must contain at least one locally defined name");
let subex = ctx.h.register(mk_body_call(mac, rule, &state, pos.clone()).await).await;
new_val.splice(range, [MacTok::Value(subex).at(pos)]);
}
};
// Does this glossary refresh actually pay off?
let top_glossary = (new_val.iter())
.flat_map(|t| if let MacTok::Name(t) = t.tok() { Some(t.clone()) } else { None })
.collect::<HashSet<_>>();
for FilteredMacroRecord { mac, rules } in &ctx.priod {
for ridx in rules {
let rule = &mac.0.rules[*ridx];
if !rule.pattern.top_glossary.is_subset(&top_glossary) {
continue;
}
let Some((pre, state, suf)) = rule.matcher.apply(&new_val, &|_| true).await else { continue };
let range = pre.len()..new_val.len() - suf.len();
let pos = (state.names().flat_map(|pair| pair.1).cloned().reduce(Pos::add))
.expect("All macro rules must contain at least one locally defined name");
let subex = ctx.h.register(mk_body_call(mac, rule, &state, pos.clone()).await).await;
std::mem::drop(state);
new_val.splice(range, [MacTok::Value(subex).at(pos)]);
}
}
let exprs = stream(async |mut h| {
for mt in new_val {
h.emit(resolve_one(ctx, arg_stk.clone(), &mt).await).await
}
})
.collect::<Vec<_>>()
.boxed_local()
.await;
exprs.into_iter().reduce(|f, x| call(f, [x])).expect(
"We checked first that it isn't empty, and named macros get replaced with their results",
)
}
async fn mk_body_call(mac: &Macro, rule: &Rule, state: &MatchState<'_>, pos: Pos) -> GExpr {
let mut call_args = vec![];
for name in rule.placeholders.iter() { for name in rule.placeholders.iter() {
call_args.push(match state.get(name).expect("Missing state entry for placeholder") { call_args.push(match state.get(name).expect("Missing state entry for placeholder") {
StateEntry::Scalar(scal) => (**scal).clone().to_expr().await, StateEntry::Scalar(scal) => (**scal).clone().to_gen().await,
StateEntry::Vec(vec) => MacTok::S(Paren::Round, vec.to_vec()).at(Pos::None).to_expr().await, StateEntry::Vec(vec) =>
MacTok::S(Paren::Round, MacTreeSeq::new(vec.iter().cloned())).at(Pos::None).to_gen().await,
}); });
} }
call_args call(sym_ref(mac.0.module.suffix([rule.body_name.clone()], &i()).await), call_args)
.push(call([sym_ref(sym!(macros::resolve_recur; ctx.i()).await), new_recur.to_expr().await])); .at(pos.clone())
call(call_args)
} }

View File

@@ -2,9 +2,10 @@ use futures::FutureExt;
use futures::future::join_all; use futures::future::join_all;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::Tok;
use orchid_base::join_ok; use orchid_base::join_ok;
use orchid_base::side::Side; use orchid_base::side::Side;
use orchid_extension::context::i;
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher}; use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
use super::vec_attrs::vec_attrs; use super::vec_attrs::vec_attrs;
@@ -31,29 +32,29 @@ fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
iter.take_while(|expr| vec_attrs(expr).is_none()).count() iter.take_while(|expr| vec_attrs(expr).is_none()).count()
} }
pub async fn mk_any(pattern: &[MacTree], i: &Interner) -> OrcRes<AnyMatcher> { pub async fn mk_any(pattern: &[MacTree]) -> OrcRes<AnyMatcher> {
let left_split = scal_cnt(pattern.iter()); let left_split = scal_cnt(pattern.iter());
if pattern.len() <= left_split { if pattern.len() <= left_split {
return Ok(AnyMatcher::Scalar(mk_scalv(pattern, i).await?)); return Ok(AnyMatcher::Scalar(mk_scalv(pattern).await?));
} }
let (left, not_left) = pattern.split_at(left_split); let (left, not_left) = pattern.split_at(left_split);
let right_split = not_left.len() - scal_cnt(pattern.iter().rev()); let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
let (mid, right) = not_left.split_at(right_split); let (mid, right) = not_left.split_at(right_split);
join_ok! { join_ok! {
left = mk_scalv(left, i).await; left = mk_scalv(left).await;
mid = mk_vec(mid, i).await; mid = mk_vec(mid).await;
right = mk_scalv(right, i).await; right = mk_scalv(right).await;
} }
Ok(AnyMatcher::Vec { left, mid, right }) Ok(AnyMatcher::Vec { left, mid, right })
} }
/// Pattern MUST NOT contain vectorial placeholders /// Pattern MUST NOT contain vectorial placeholders
async fn mk_scalv(pattern: &[MacTree], i: &Interner) -> OrcRes<Vec<ScalMatcher>> { async fn mk_scalv(pattern: &[MacTree]) -> OrcRes<Vec<ScalMatcher>> {
join_all(pattern.iter().map(|pat| mk_scalar(pat, i))).await.into_iter().collect() join_all(pattern.iter().map(mk_scalar)).await.into_iter().collect()
} }
/// Pattern MUST start and end with a vectorial placeholder /// Pattern MUST start and end with a vectorial placeholder
pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> { pub async fn mk_vec(pattern: &[MacTree]) -> OrcRes<VecMatcher> {
debug_assert!(!pattern.is_empty(), "pattern cannot be empty"); debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial"); debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial"); debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
@@ -68,8 +69,8 @@ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
(&[], &[]) => Ok(VecMatcher::Placeh { key, nonzero }), (&[], &[]) => Ok(VecMatcher::Placeh { key, nonzero }),
(&[], _) => { (&[], _) => {
join_ok! { join_ok! {
sep = mk_scalv(r_sep, i).await; sep = mk_scalv(r_sep).await;
right = mk_vec(r_side, i).boxed_local().await; right = mk_vec(r_side).boxed_local().await;
} }
Ok(VecMatcher::Scan { Ok(VecMatcher::Scan {
direction: Side::Left, direction: Side::Left,
@@ -80,8 +81,8 @@ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
}, },
(_, &[]) => { (_, &[]) => {
join_ok! { join_ok! {
left = mk_vec(l_side, i).boxed_local().await; left = mk_vec(l_side).boxed_local().await;
sep = mk_scalv(l_sep, i).await; sep = mk_scalv(l_sep).await;
} }
Ok(VecMatcher::Scan { Ok(VecMatcher::Scan {
direction: Side::Right, direction: Side::Right,
@@ -95,10 +96,10 @@ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>(); l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64)); key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
join_ok! { join_ok! {
left = mk_vec(l_side, i).boxed_local().await; left = mk_vec(l_side).boxed_local().await;
left_sep = mk_scalv(l_sep, i).await; left_sep = mk_scalv(l_sep).await;
right_sep = mk_scalv(r_sep, i).await; right_sep = mk_scalv(r_sep).await;
right = mk_vec(r_side, i).boxed_local().await; right = mk_vec(r_side).boxed_local().await;
} }
Ok(VecMatcher::Middle { Ok(VecMatcher::Middle {
left: Box::new(left), left: Box::new(left),
@@ -113,7 +114,7 @@ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
} }
/// Pattern MUST NOT be a vectorial placeholder /// Pattern MUST NOT be a vectorial placeholder
async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> { async fn mk_scalar(pattern: &MacTree) -> OrcRes<ScalMatcher> {
Ok(match &*pattern.tok { Ok(match &*pattern.tok {
MacTok::Name(n) => ScalMatcher::Name(n.clone()), MacTok::Name(n) => ScalMatcher::Name(n.clone()),
MacTok::Ph(Ph { name, kind }) => match kind { MacTok::Ph(Ph { name, kind }) => match kind {
@@ -122,10 +123,10 @@ async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> {
}, },
PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() }, PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
}, },
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body, i).boxed_local().await?)), MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(&body.items).boxed_local().await?)),
MacTok::Lambda(..) => MacTok::Lambda(..) =>
return Err(mk_errv( return Err(mk_errv(
i.i("Lambda in matcher").await, i().i("Lambda in matcher").await,
"Lambdas can't be matched for, only generated in templates", "Lambdas can't be matched for, only generated in templates",
[pattern.pos()], [pattern.pos()],
)), )),
@@ -136,50 +137,52 @@ async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use orchid_base::interner::Interner;
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::sym; use orchid_base::sym;
use orchid_base::tokens::Paren; use orchid_base::tokens::Paren;
use orchid_extension::context::{i, mock_ctx, with_ctx};
use test_executors::spin_on; use test_executors::spin_on;
use super::mk_any; use super::mk_any;
use crate::macros::MacTok; use crate::macros::MacTok;
use crate::macros::mactree::{Ph, PhKind}; use crate::macros::mactree::{MacTreeSeq, Ph, PhKind};
#[test] #[test]
fn test_scan() { fn test_scan() {
spin_on(async { spin_on(with_ctx(mock_ctx(), async {
let i = Interner::new_master(); let ex = |tok: MacTok| async { tok.at(SrcRange::mock(&i()).await.pos()) };
let ex = |tok: MacTok| async { tok.at(SrcRange::mock(&i).await.pos()) };
let pattern = vec![ let pattern = vec![
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false }, kind: PhKind::Vector { priority: 0, at_least_one: false },
name: i.i("::prefix").await, name: i().i("::prefix").await,
})) }))
.await, .await,
ex(MacTok::Name(sym!(prelude::do; i).await)).await, ex(MacTok::Name(sym!(prelude::do; i()))).await,
ex(MacTok::S(Paren::Round, vec![ ex(MacTok::S(
Paren::Round,
MacTreeSeq::new([
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false }, kind: PhKind::Vector { priority: 0, at_least_one: false },
name: i.i("expr").await, name: i().i("expr").await,
})) }))
.await, .await,
ex(MacTok::Name(sym!(prelude::; ; i).await)).await, ex(MacTok::Name(sym!(prelude::; ; i()))).await,
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 1, at_least_one: false }, kind: PhKind::Vector { priority: 1, at_least_one: false },
name: i.i("rest").await, name: i().i("rest").await,
})) }))
.await, .await,
])) ]),
))
.await, .await,
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false }, kind: PhKind::Vector { priority: 0, at_least_one: false },
name: i.i("::suffix").await, name: i().i("::suffix").await,
})) }))
.await, .await,
]; ];
let matcher = mk_any(&pattern, &i).await.expect("This matcher isn't broken"); let matcher = mk_any(&pattern).await.expect("This matcher isn't broken");
println!("{matcher}"); println!("{matcher}");
}) }))
} }
} }

View File

@@ -1,87 +1,61 @@
use std::fmt; use std::fmt;
use std::rc::Rc;
use itertools::Itertools;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::interner::{Interner, Tok};
use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_extension::context::i;
use super::any_match::any_match; use super::any_match::any_match;
use super::build::{mk_any, mk_vec}; use super::build::mk_any;
use super::shared::{AnyMatcher, VecMatcher}; use super::shared::AnyMatcher;
use super::state::{MatchState, StateEntry}; use super::state::{MatchState, StateEntry};
use super::vec_attrs::vec_attrs; use super::vec_attrs::vec_attrs;
use super::vec_match::vec_match; use crate::macros::mactree::{MacTreeSeq, Ph, PhKind};
use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree}; use crate::macros::{MacTok, MacTree};
pub struct NamedMatcher { pub struct Matcher {
inner: AnyMatcher, inner: AnyMatcher,
head: Sym,
after_tok: Tok<String>,
}
impl NamedMatcher {
pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
let head = match pattern.first().map(|tree| tree.tok()) {
Some(MacTok::Name(name)) => name.clone(),
_ => panic!("Named matchers must begin with a name"),
};
let after_tok = i.i("::after").await;
let inner = match pattern.last().and_then(vec_attrs).is_some() {
true => mk_any(pattern, i).await?,
false => {
let kind = PhKind::Vector { priority: 0, at_least_one: false };
let suffix = [MacTok::Ph(Ph { name: after_tok.clone(), kind }).at(Pos::None)];
mk_any(&pattern.iter().cloned().chain(suffix).collect_vec(), i).await?
},
};
Ok(Self { after_tok, inner, head })
}
pub fn head(&self) -> Sym { self.head.clone() }
/// Also returns the tail, if any, which should be matched further
/// Note that due to how priod works below, the main usable information from
/// the tail is its length
pub fn apply<'a>(
&self,
seq: &'a [MacTree],
save_loc: impl Fn(Sym) -> bool,
) -> Option<(MatchState<'a>, &'a [MacTree])> {
let mut state = any_match(&self.inner, seq, &save_loc)?;
match state.remove(self.after_tok.clone()) {
Some(StateEntry::Scalar(_)) => panic!("{} can never be a scalar entry!", self.after_tok),
Some(StateEntry::Vec(v)) => Some((state, v)),
None => Some((state, &[][..])),
}
}
}
impl fmt::Display for NamedMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) }
}
impl fmt::Debug for NamedMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
} }
pub struct PriodMatcher(VecMatcher); impl Matcher {
impl PriodMatcher { pub async fn new(pattern: MacTreeSeq) -> OrcRes<Self> {
pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> { let mut pattern = Rc::unwrap_or_clone(pattern.items);
assert!( let kind = PhKind::Vector { at_least_one: false, priority: 0 };
pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(), let first = pattern.first().expect("Empty pattern is not allowed");
"Prioritized matchers must start and end with a vectorial", if vec_attrs(first).is_none() {
); let pos = first.pos();
Ok(Self(mk_vec(pattern, i).await?)) pattern.insert(0, MacTok::Ph(Ph { name: i().i("::before").await, kind }).at(pos));
} }
/// tokens before the offset always match the prefix let last = pattern.last().expect("first returned Some above");
pub fn apply<'a>( if vec_attrs(last).is_none() {
let pos = last.pos();
pattern.insert(0, MacTok::Ph(Ph { name: i().i("::after").await, kind }).at(pos));
}
Ok(Matcher { inner: mk_any(&pattern).await? })
}
/// Also returns the head and tail, which should be matched by overarching
/// matchers attempted later.
pub async fn apply<'a>(
&self, &self,
seq: &'a [MacTree], seq: &'a [MacTree],
save_loc: impl Fn(Sym) -> bool, save_loc: &dyn Fn(Sym) -> bool,
) -> Option<MatchState<'a>> { ) -> Option<(&'a [MacTree], MatchState<'a>, &'a [MacTree])> {
vec_match(&self.0, seq, &save_loc) let mut result = any_match(&self.inner, seq, &save_loc)?;
async fn remove_frame<'a>(result: &mut MatchState<'a>, key: &str) -> &'a [MacTree] {
match result.remove(i().i(key).await) {
Some(StateEntry::Scalar(_)) => panic!("{key} is defined in the constructor as a Vec"),
Some(StateEntry::Vec(v)) => v,
None => &[],
}
}
let before = remove_frame(&mut result, "::before").await;
let after = remove_frame(&mut result, "::after").await;
Some((before, result, after))
} }
} }
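// Rough sketch of the ::before/::after framing: a pattern with no leading or trailing
// vectorial, say `foo $x`, is compiled as if it were `..$::before foo $x ..$::after`, so
// applying it to the sequence `a b foo c d` would yield before = [a, b], a state binding
// $x to c, and after = [d]; the caller decides how to continue matching the unconsumed
// head and tail.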
impl fmt::Display for PriodMatcher { impl fmt::Display for Matcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) }
} }
impl fmt::Debug for PriodMatcher { impl fmt::Debug for Matcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PriodMatcher({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
} }

View File

@@ -19,7 +19,7 @@ pub fn scal_match<'a>(
(ScalMatcher::Placeh { key }, _) => (ScalMatcher::Placeh { key }, _) =>
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))), Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 => (ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
any_match(b_mat, &body[..], save_loc), any_match(b_mat, &body.items, save_loc),
_ => None, _ => None,
} }
} }

View File

@@ -54,6 +54,9 @@ impl<'a> MatchState<'a> {
pub fn from_name(name: Sym, location: Pos) -> Self { pub fn from_name(name: Sym, location: Pos) -> Self {
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() } Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
} }
pub fn names(&self) -> impl Iterator<Item = (Sym, &[Pos])> {
self.name_posv.iter().map(|(sym, vec)| (sym.clone(), &vec[..]))
}
pub fn get(&self, key: &Tok<String>) -> Option<&StateEntry<'a>> { self.placeholders.get(key) } pub fn get(&self, key: &Tok<String>) -> Option<&StateEntry<'a>> { self.placeholders.get(key) }
pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> { pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> {
self.placeholders.remove(&name) self.placeholders.remove(&name)

View File

@@ -36,7 +36,6 @@ pub fn vec_match<'a>(
} }
None None
}, },
// XXX predict heap space usage and allocation count
VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => { VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => {
if seq.len() < left_sep.len() + right_sep.len() { if seq.len() < left_sep.len() + right_sep.len() {
return None; return None;

View File

@@ -0,0 +1,177 @@
use futures::{StreamExt, stream};
use orchid_base::error::OrcRes;
use orchid_base::sym;
use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own;
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::{GExpr, call, sym_ref};
use orchid_extension::tree::{GenMember, fun, prefix};
use crate::macros::match_macros::MatcherAtom;
use crate::macros::resolve::resolve;
use crate::macros::utils::{build_macro, mactree, mactreev};
use crate::{HomoTpl, MacTree, OrcOpt, Tpl};
pub async fn gen_std_macro_lib() -> Vec<GenMember> {
prefix("std", [
prefix("option", [
fun(false, "is_some_body", |sub: TAtom<MatcherAtom>, val: OrcOpt<Expr>| {
exec(async move |mut h| {
let Some(sub_val) = val.0 else { return Ok(OrcOpt(None)) };
h.exec::<OrcOpt<Expr>>(call(sub.to_gen().await, [sub_val.to_gen().await])).await
})
}),
fun(false, "is_none_body", async |val: OrcOpt<Expr>| {
if val.0.is_none() { OrcOpt(Some(Tpl(()))) } else { OrcOpt(None) }
}),
build_macro(None, ["of", "empty"])
.rule(mactreev!(pattern::match_rule ( std::option::of "...$" sub_pattern 0)), [
|[sub]: [_; _]| {
exec(async move |mut h| {
let sub = h
.exec::<TAtom<MatcherAtom>>(
resolve(mactree!(pattern::match_rule "push" sub;)).await,
)
.await?;
Ok(MatcherAtom {
keys: sub.keys().collect().await,
matcher: h
.register(call(sym_ref(sym!(std::option::is_some_body; i())), [sub
.to_gen()
.await]))
.await,
})
})
},
])
.rule(mactreev!(pattern::match_rule(std::option::empty)), [|[]: [_; _]| {
exec(async |mut h| {
Ok(MatcherAtom {
keys: vec![],
matcher: h.register(sym_ref(sym!(std::option::is_none_body; i()))).await,
})
})
}])
.finish(),
]),
prefix("tuple", [
build_macro(None, ["t"])
.rule(mactreev!(std::tuple::t [ "...$" elements 0 ]), [|[elements]: [_; _]| {
exec(async move |mut h| {
let tup = h
.exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve; i())), [
mactree!((macros::common::comma_list "push" elements ;)).to_gen().await,
]))
.await?;
let val = stream::iter(&tup.0[..])
.fold(sym_ref(sym!(std::tuple::empty; i())), async |head, new| {
call(sym_ref(sym!(std::tuple::cat; i())), [
head,
call(sym_ref(sym!(std::tuple::one; i())), [call(
sym_ref(sym!(macros::resolve; i())),
[new.clone().to_gen().await],
)]),
])
})
.await;
Ok(val)
})
}])
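      // Result sketch for the fold above: `std::tuple::t [a, b]` resolves to roughly
      // cat(cat(empty, one(resolve a)), one(resolve b)), i.e. the comma-separated elements
      // are appended left to right onto std::tuple::empty.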
.rule(
mactreev!(pattern::match_rule(std::tuple::t[ "...$" elements 0 macros::common::..])),
[async |[elements]: [_; _]| parse_tpl(elements, Some(mactree!(macros::common::_))).await],
)
.rule(
mactreev!(pattern::match_rule(
std::tuple::t[ "...$" elements 1 macros::common::.. "...$" tail 0]
)),
[async |[elements, tail]: [_; _]| parse_tpl(elements, Some(tail)).await],
)
.rule(mactreev!(pattern::match_rule(std::tuple::t[ "...$" elements 0])), [
|[elements]: [_; _]| parse_tpl(elements, None),
])
.finish(),
fun(false, "matcher_body", tuple_matcher_body),
]),
])
}
fn parse_tpl(elements: MacTree, tail_matcher: Option<MacTree>) -> impl Future<Output = GExpr> {
exec(async move |mut h| -> OrcRes<MatcherAtom> {
let tup = h
.exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve; i())), [
mactree!((macros::common::comma_list "push" elements ;)).to_gen().await,
]))
.await?;
let mut subs = Vec::with_capacity(tup.0.len());
for mac_a in &tup.0[..] {
let mac = own(mac_a).await;
let sub = h
.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
mactree!(pattern::match_rule "push" mac ;).to_gen().await,
]))
.await?;
subs.push(sub);
}
let tail_matcher = match tail_matcher {
Some(mac) => Some(
h.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
mactree!(pattern::match_rule "push" mac ;).to_gen().await,
]))
.await?,
),
None => None,
};
Ok(MatcherAtom {
keys: stream::iter(&subs[..])
.flat_map(|t| t.keys())
.chain(stream::iter(&tail_matcher).flat_map(|mat| mat.keys()))
.collect()
.await,
matcher: call(sym_ref(sym!(std::tuple::matcher_body; i())), [
HomoTpl(subs).to_gen().await,
OrcOpt(tail_matcher).to_gen().await,
])
.to_expr()
.await,
})
})
}
fn tuple_matcher_body(
children: HomoTpl<TAtom<MatcherAtom>>,
tail: OrcOpt<TAtom<MatcherAtom>>,
value: HomoTpl<Expr>,
) -> impl Future<Output = GExpr> {
exec(async move |mut h| -> OrcRes<OrcOpt<GExpr>> {
if value.0.len() < children.0.len() {
return Ok(OrcOpt(None));
}
let mut binds = Vec::new();
for (sub_mat, sub_val) in children.0.iter().zip(&value.0) {
match sub_mat.run_matcher(&mut h, sub_val.clone()).await? {
OrcOpt(None) => return Ok(OrcOpt(None)),
OrcOpt(Some(subres)) => binds.extend(subres.0),
}
}
match tail.0 {
None if children.0.len() < value.0.len() => return Ok(OrcOpt(None)),
None => (),
Some(tail_mat) => {
let tail_tpl = stream::iter(&value.0[children.0.len()..])
.fold(sym_ref(sym!(std::tuple::empty; i())), async |prefix, new| {
call(sym_ref(sym!(std::tuple::cat; i())), [prefix, new.clone().to_gen().await])
})
.await;
match tail_mat.run_matcher(&mut h, tail_tpl).await? {
OrcOpt(Some(tail_binds)) => binds.extend(tail_binds.0),
OrcOpt(None) => return Ok(OrcOpt(None)),
}
},
};
todo!()
})
}

View File

@@ -0,0 +1,275 @@
use std::borrow::Cow;
use std::rc::Rc;
use async_fn_stream::stream;
use futures::StreamExt;
use futures::future::LocalBoxFuture;
use itertools::{Itertools, chain};
use never::Never;
use orchid_base::name::{NameLike, Sym, VPath};
use orchid_extension::atom::{Atomic, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::gen_expr::{GExpr, sym_ref};
use orchid_extension::tree::{GenMember, MemKind, cnst, lazy};
use crate::macros::macro_value::{Macro, MacroData, Rule};
use crate::macros::mactree::MacTreeSeq;
use crate::macros::rule::matcher::Matcher;
use crate::{MacTok, MacTree};
pub type Args = Vec<MacTree>;
#[derive(Clone)]
pub struct MacroBodyArgCollector {
argc: usize,
args: Args,
cb: Rc<dyn Fn(Args) -> LocalBoxFuture<'static, GExpr>>,
}
impl Atomic for MacroBodyArgCollector {
type Data = ();
type Variant = OwnedVariant;
}
impl OwnedAtom for MacroBodyArgCollector {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn call_ref(&self, arg: orchid_extension::expr::Expr) -> GExpr {
eprintln!("This is an intermediary value. It should never be copied");
self.clone().call(arg).await
}
async fn call(mut self, arg: orchid_extension::expr::Expr) -> GExpr {
let atom = (TAtom::downcast(arg.handle()).await).unwrap_or_else(|_| {
panic!("This is an intermediary value, the argument types are known in advance")
});
self.args.push(own(&atom).await);
if self.argc == self.args.len() {
(self.cb)(self.args).await.to_gen().await
} else {
self.to_gen().await
}
}
}
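// Sketch of the currying above: a body constant built with argc = 2 starts out as
// MacroBodyArgCollector { argc: 2, args: vec![] }. Applying it to the first MacTree atom
// yields a collector holding one argument, and applying that to the second atom finally
// invokes `cb` with both arguments and returns the macro body expression.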
fn body_name(name: &str, counter: usize) -> String { format!("({name})::{counter}") }
pub(crate) fn build_macro(
prio: Option<u64>,
own_kws: impl IntoIterator<Item = &'static str>,
) -> MacroBuilder {
MacroBuilder {
prio,
own_kws: own_kws.into_iter().collect(),
patterns: Vec::new(),
body_consts: Vec::new(),
}
}
pub(crate) struct MacroBuilder {
prio: Option<u64>,
own_kws: Vec<&'static str>,
patterns: Vec<MacTreeSeq>,
body_consts: Vec<GenMember>,
}
impl MacroBuilder {
pub(crate) fn rule<const N: usize, R: ToExpr>(
mut self,
pat: MacTreeSeq,
body: [impl AsyncFn([MacTree; N]) -> R + 'static; 1],
) -> Self {
let [body] = body;
let body = Rc::new(body);
let name = &body_name(self.own_kws[0], self.body_consts.len());
self.body_consts.extend(match N {
0 => lazy(true, name, async move |_| {
let argv = [].into_iter().collect_array().expect("N is 0");
MemKind::Const(body(argv).await.to_gen().await)
}),
1.. => cnst(true, name, MacroBodyArgCollector {
argc: N,
args: Vec::new(),
cb: Rc::new(move |argv| {
let arr = argv.into_iter().collect_array::<N>().expect("argc should enforce the length");
let body = body.clone();
Box::pin(async move { body(arr).await.to_gen().await })
}),
}),
});
self.patterns.push(pat);
self
}
pub(crate) fn finish(self) -> Vec<GenMember> {
let Self { own_kws, prio, patterns, body_consts } = self;
let name = own_kws[0];
let main_const = lazy(true, name, async move |path| {
let module = (Sym::new(path.split_last_seg().1.iter().cloned(), &i()).await)
.expect("Default macro in global root");
MemKind::Const(
Macro(Rc::new(MacroData {
module,
prio,
rules: stream(async |mut h| {
for (counter, pattern) in patterns.into_iter().enumerate() {
let mut placeholders = Vec::new();
pattern.map(&mut false, &mut |tt| {
if let MacTok::Ph(ph) = &*tt.tok {
placeholders.push(ph.name.clone())
}
None
});
h.emit(Rule {
matcher: Matcher::new(pattern.clone()).await.unwrap(),
pattern,
placeholders,
body_name: i().i(&format!("({name})::{counter}")).await,
})
.await;
}
})
.collect()
.await,
}))
.to_gen()
.await,
)
});
let kw_consts = own_kws[1..].iter().flat_map(|kw| {
lazy(true, kw, async |path| {
let main_const_name = VPath::new(path.split_last_seg().1.iter().cloned())
.name_with_suffix(i().i(name).await)
.to_sym(&i())
.await;
MemKind::Const(sym_ref(main_const_name))
})
});
chain!(main_const, kw_consts, body_consts).collect()
}
}
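// Sketch of the names produced above: `build_macro(None, ["of", "empty"])` with two rules
// emits a lazy constant `of` holding the Macro value, a constant `empty` that is just a
// sym_ref back to its sibling `of`, and two body constants named "(of)::0" and "(of)::1",
// which the corresponding rules reference through `body_name`.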
macro_rules! mactree {
($($body:tt)*) => {
$crate::macros::utils::mactreev!(($($body)*)).items[0].clone()
};
}
macro_rules! mactreev_impl {
(@RECUR $ret:ident) => {};
(@RECUR $ret:ident "..$" $name:ident $prio:literal $($tail:tt)*) => {
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
name: orchid_extension::context::i().i(stringify!($name)).await,
kind: $crate::macros::mactree::PhKind::Vector{ at_least_one: false, priority: $prio }
}).at(orchid_base::location::Pos::Inherit));
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident "...$" $name:ident $prio:literal $($tail:tt)*) => {
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
name: orchid_extension::context::i().i(stringify!($name)).await,
kind: $crate::macros::mactree::PhKind::Vector{ at_least_one: true, priority: $prio }
}).at(orchid_base::location::Pos::Inherit));
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident "$" $name:ident $($tail:tt)*) => {
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
      name: orchid_extension::context::i().i(stringify!($name)).await,
kind: $crate::macros::mactree::PhKind::Scalar
}).at(orchid_base::location::Pos::Inherit));
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident "Val" $arg:expr ; $($tail:tt)*) => {
$ret.push(
$crate::macros::mactree::MacTok::Value($arg)
.at(orchid_base::location::Pos::Inherit)
);
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident "push" $arg:expr ; $($tail:tt)*) => {
$ret.push($arg);
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident "l_" $arg:expr ; ($($body:tt)*) $($tail:tt)*) => {
$ret.push(MacTok::Lambda(
MacTok::Name($arg).at(orchid_base::location::Pos::Inherit),
$crate::macros::utils::mactreev!($($body)*)
).at(orchid_base::location::Pos::Inherit));
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident "l" $argh:tt $(:: $arg:tt)+ ($($body:tt)*) $($tail:tt)*) => {
$ret.push(MacTok::Lambda(
MacTok::Name(sym!($argh $(:: $arg)+; orchid_extension::context::i()).await).at(orchid_base::location::Pos::Inherit),
$crate::macros::utils::mactreev!($($body)*)
).at(orchid_base::location::Pos::Inherit));
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident $name:literal $($tail:tt)*) => {
assert!(
$name.contains("::"),
"{} was treated as a name, but it doesn't have a namespace prefix",
$name
);
let sym = orchid_base::name::Sym::parse(
$name,
&orchid_extension::context::i()
).await.expect("Empty string in sym literal in Rust");
$ret.push(
$crate::macros::mactree::MacTok::Name(sym)
.at(orchid_base::location::Pos::Inherit)
);
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident ( $($body:tt)* ) $($tail:tt)*) => {
$ret.push(
$crate::macros::mactree::MacTok::S(
orchid_base::tree::Paren::Round,
$crate::macros::utils::mactreev!($($body)*)
)
.at(orchid_base::location::Pos::Inherit)
);
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident [ $($body:tt)* ] $($tail:tt)*) => {
$ret.push(
$crate::macros::mactree::MacTok::S(
orchid_base::tree::Paren::Square,
$crate::macros::utils::mactreev!($($body)*)
)
.at(orchid_base::location::Pos::Inherit)
);
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident { $($body:tt)* } $($tail:tt)*) => {
$ret.push(
$crate::macros::mactree::MacTok::S(
orchid_base::tree::Paren::Curly,
$crate::macros::utils::mactreev!($($body)*)
)
.at(orchid_base::location::Pos::Inherit)
);
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
(@RECUR $ret:ident $ns:ident :: $nhead:tt $($tail:tt)*) => {
$crate::macros::utils::mactreev_impl!(@NAME_MUNCHER $ret ($ns :: $nhead) $($tail)*)
};
(@NAME_MUNCHER $ret:ident ($($munched:tt)*) :: $name:tt $($tail:tt)*) => {
$crate::macros::utils::mactreev_impl!(@NAME_MUNCHER $ret ($($munched)* :: $name) $($tail)*)
};
(@NAME_MUNCHER $ret:ident ($($munched:tt)*) $($tail:tt)*) => {
let sym = orchid_base::sym!($($munched)* ; orchid_extension::context::i());
$ret.push(
$crate::macros::mactree::MacTok::Name(sym)
.at(orchid_base::location::Pos::Inherit)
);
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
};
() => { Vec::new() };
}
macro_rules! mactreev {
($($tail:tt)*) => {
{
let mut ret = Vec::<$crate::macros::mactree::MacTree>::new();
ret.extend([]); // silence unneeded mut warning
$crate::macros::utils::mactreev_impl!(@RECUR ret $($tail)*);
$crate::macros::mactree::MacTreeSeq::new(ret)
}
};
}
pub(crate) use {mactree, mactreev, mactreev_impl};
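// Expansion sketch, following the rules above: `mactreev!(std::option::of "...$" sub_pattern 0)`
// builds a MacTreeSeq whose items are a MacTok::Name for the symbol std::option::of followed
// by a MacTok::Ph named "sub_pattern" with kind PhKind::Vector { at_least_one: true, priority: 0 },
// all located at Pos::Inherit.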

View File

@@ -1,4 +1,8 @@
pub mod number; pub mod number;
pub mod string; pub mod option;
pub mod protocol;
pub mod record;
pub mod reflection;
pub mod std_system; pub mod std_system;
pub mod string;
pub mod tuple;

View File

@@ -1,15 +1,20 @@
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_api_traits::Request;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::format::FmtUnit; use orchid_base::format::FmtUnit;
use orchid_base::name::Sym;
use orchid_base::number::Numeric; use orchid_base::number::Numeric;
use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, ToAtom, TypAtom}; use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, Supports, TAtom, ToAtom};
use orchid_extension::atom_thin::{ThinAtom, ThinVariant}; use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::context::i;
use orchid_extension::conv::TryFromExpr; use orchid_extension::conv::TryFromExpr;
use orchid_extension::expr::Expr; use orchid_extension::expr::Expr;
use orchid_extension::system::SysCtx;
use ordered_float::NotNan; use ordered_float::NotNan;
use rust_decimal::prelude::Zero; use rust_decimal::prelude::Zero;
use crate::std::protocol::types::GetTagIdMethod;
use crate::std::string::to_string::ToStringMethod;
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Int(pub i64); pub struct Int(pub i64);
impl Atomic for Int { impl Atomic for Int {
@@ -17,11 +22,21 @@ impl Atomic for Int {
type Data = Self; type Data = Self;
} }
impl ThinAtom for Int { impl ThinAtom for Int {
async fn print(&self, _: SysCtx) -> FmtUnit { self.0.to_string().into() } async fn print(&self) -> FmtUnit { self.0.to_string().into() }
} }
impl TryFromExpr for Int { impl TryFromExpr for Int {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
TypAtom::<Int>::try_from_expr(expr).await.map(|t| t.value) TAtom::<Int>::try_from_expr(expr).await.map(|t| t.value)
}
}
impl Supports<GetTagIdMethod> for Int {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::parse("std::number::Int", &i()).await.unwrap().to_api()
}
}
impl Supports<ToStringMethod> for Int {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.to_string()
} }
} }
@@ -32,13 +47,18 @@ impl Atomic for Float {
type Data = Self; type Data = Self;
} }
impl ThinAtom for Float { impl ThinAtom for Float {
async fn print(&self, _: SysCtx) -> FmtUnit { self.0.to_string().into() } async fn print(&self) -> FmtUnit { self.0.to_string().into() }
} }
impl TryFromExpr for Float { impl TryFromExpr for Float {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Ok(Self(Num::try_from_expr(expr).await?.0.to_f64())) Ok(Self(Num::try_from_expr(expr).await?.0.to_f64()))
} }
} }
impl Supports<ToStringMethod> for Float {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.to_string()
}
}
pub struct Num(pub Numeric); pub struct Num(pub Numeric);
impl TryFromExpr for Num { impl TryFromExpr for Num {
@@ -47,7 +67,7 @@ impl TryFromExpr for Num {
Ok(t) => return Ok(Num(Numeric::Int(t.0))), Ok(t) => return Ok(Num(Numeric::Int(t.0))),
Err(e) => e, Err(e) => e,
}; };
match TypAtom::<Float>::try_from_expr(expr).await { match TAtom::<Float>::try_from_expr(expr).await {
Ok(t) => Ok(Num(Numeric::Float(t.0))), Ok(t) => Ok(Num(Numeric::Float(t.0))),
Err(e2) => Err(e + e2), Err(e2) => Err(e + e2),
} }

View File

@@ -3,6 +3,7 @@ use std::ops::RangeInclusive;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::number::{num_to_errv, parse_num}; use orchid_base::number::{num_to_errv, parse_num};
use orchid_extension::atom::ToAtom; use orchid_extension::atom::ToAtom;
use orchid_extension::context::i;
use orchid_extension::lexer::{LexContext, Lexer}; use orchid_extension::lexer::{LexContext, Lexer};
use orchid_extension::tree::{GenTokTree, x_tok}; use orchid_extension::tree::{GenTokTree, x_tok};
@@ -12,13 +13,13 @@ use super::num_atom::Num;
pub struct NumLexer; pub struct NumLexer;
impl Lexer for NumLexer { impl Lexer for NumLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9']; const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> { async fn lex<'a>(all: &'a str, lxcx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c)); let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len())); let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
let fac = match parse_num(chars) { let fac = match parse_num(chars) {
Ok(numeric) => Num(numeric).to_atom_factory(), Ok(numeric) => Num(numeric).to_atom_factory(),
Err(e) => return Err(num_to_errv(e, ctx.pos(all), ctx.src(), ctx.ctx.i()).await), Err(e) => return Err(num_to_errv(e, lxcx.pos(all), lxcx.src(), &i()).await),
}; };
Ok((tail, x_tok(fac).await.at(ctx.pos_lt(chars.len(), tail)))) Ok((tail, x_tok(fac).await.at(lxcx.pos_lt(chars.len(), tail))))
} }
} }

View File

@@ -6,28 +6,28 @@ use super::num_atom::{Float, HomoArray, Int, Num};
pub fn gen_num_lib() -> Vec<GenMember> { pub fn gen_num_lib() -> Vec<GenMember> {
prefix("std::number", [ prefix("std::number", [
fun(true, "add", |a: Num, b: Num| async move { fun(true, "add", async |a: Num, b: Num| {
Num(match HomoArray::new([a.0, b.0]) { Num(match HomoArray::new([a.0, b.0]) {
HomoArray::Int([a, b]) => Numeric::Int(a + b), HomoArray::Int([a, b]) => Numeric::Int(a + b),
HomoArray::Float([a, b]) => Numeric::Float(a + b), HomoArray::Float([a, b]) => Numeric::Float(a + b),
}) })
}), }),
fun(true, "neg", |a: Num| async move { fun(true, "neg", async |a: Num| {
Num(match a.0 { Num(match a.0 {
Numeric::Int(i) => Numeric::Int(-i), Numeric::Int(i) => Numeric::Int(-i),
Numeric::Float(f) => Numeric::Float(-f), Numeric::Float(f) => Numeric::Float(-f),
}) })
}), }),
fun(true, "mul", |a: Num, b: Num| async move { fun(true, "mul", async |a: Num, b: Num| {
Num(match HomoArray::new([a.0, b.0]) { Num(match HomoArray::new([a.0, b.0]) {
HomoArray::Int([a, b]) => Numeric::Int(a * b), HomoArray::Int([a, b]) => Numeric::Int(a * b),
HomoArray::Float([a, b]) => Numeric::Float(a * b), HomoArray::Float([a, b]) => Numeric::Float(a * b),
}) })
}), }),
fun(true, "idiv", |a: Int, b: Int| async move { Int(a.0 / b.0) }), fun(true, "idiv", async |a: Int, b: Int| Int(a.0 / b.0)),
fun(true, "imod", |a: Int, b: Int| async move { Int(a.0 % b.0) }), fun(true, "imod", async |a: Int, b: Int| Int(a.0 % b.0)),
fun(true, "fdiv", |a: Float, b: Float| async move { Float(a.0 / b.0) }), fun(true, "fdiv", async |a: Float, b: Float| Float(a.0 / b.0)),
fun(true, "fmod", |a: Float, b: Float| async move { fun(true, "fmod", async |a: Float, b: Float| {
Float(a.0 - NotNan::new((a.0 / b.0).trunc()).unwrap() * b.0) Float(a.0 - NotNan::new((a.0 / b.0).trunc()).unwrap() * b.0)
}), }),
]) ])

View File

@@ -0,0 +1,75 @@
use std::borrow::Cow;
use std::pin::Pin;
use futures::AsyncWrite;
use orchid_api_traits::Encode;
use orchid_base::error::mk_errv;
use orchid_base::sym;
use orchid_extension::atom::{Atomic, ForeignAtom, TAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::context::i;
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::expr::{Expr, ExprHandle};
use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::tree::{GenMember, cnst, fun, prefix};
use crate::{OrcString, api};
#[derive(Clone)]
pub struct OptAtom(Option<Expr>);
impl Atomic for OptAtom {
type Data = Option<api::ExprTicket>;
type Variant = OwnedVariant;
}
impl OwnedAtom for OptAtom {
type Refs = Vec<Expr>;
async fn val(&self) -> Cow<'_, Self::Data> {
Cow::Owned(self.0.as_ref().map(|ex| ex.handle().ticket()))
}
async fn deserialize(mut ctx: impl DeserializeCtx, refs: Self::Refs) -> Self {
Self(ctx.read::<bool>().await.then(|| refs.into_iter().next().unwrap()))
}
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.0.is_some().encode(write).await;
self.0.iter().cloned().collect()
}
}
pub struct OrcOpt<T>(pub Option<T>);
impl<T: TryFromExpr> TryFromExpr for OrcOpt<T> {
async fn try_from_expr(expr: Expr) -> orchid_base::error::OrcRes<Self> {
let atom = TAtom::<OptAtom>::try_from_expr(expr).await?;
match atom.value {
None => Ok(OrcOpt(None)),
Some(tk) => Ok(OrcOpt(Some(
T::try_from_expr(Expr::from_handle(ExprHandle::from_ticket(tk).await)).await?,
))),
}
}
}
impl<T: ToExpr + 'static> ToExpr for OrcOpt<T> {
async fn to_gen(self) -> orchid_extension::gen_expr::GExpr {
if let Some(val) = self.0 {
call(sym_ref(sym!(std::option::some; i())), [val.to_gen().await])
} else {
sym_ref(sym!(std::option::none; i()))
}
}
}
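// Hypothetical usage sketch (not one of the functions below): since OrcOpt converts in both
// directions, an extension function can take and return plain Options, e.g.
//
//   fun(true, "or_else", async |opt: OrcOpt<Expr>, fallback: Expr| {
//     match opt.0 { Some(val) => val, None => fallback }
//   }),
//
// which would accept `std::option::some x` or `std::option::none` as its first argument.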
pub fn gen_option_lib() -> Vec<GenMember> {
prefix("std::option", [
cnst(true, "none", OptAtom(None)),
fun(true, "some", async |ex: Expr| OptAtom(Some(ex))),
fun(true, "expect", async |opt: ForeignAtom, msg: OrcString| {
match OrcOpt::try_from_expr(opt.clone().ex()).await? {
OrcOpt(Some(ex)) => Ok::<Expr, _>(ex),
OrcOpt(None) => Err(mk_errv(
i().i("Unwrapped std::option::none").await,
msg.get_string().await.as_str(),
[opt.pos()],
)),
}
}),
])
}

View File

@@ -0,0 +1,4 @@
pub mod parse_impls;
pub mod proto_parser;
pub mod type_parser;
pub mod types;

View File

@@ -0,0 +1,78 @@
use itertools::{Itertools, chain};
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::interner::Tok;
use orchid_base::name::Sym;
use orchid_base::parse::{
Import, ParseCtx, Parsed, Snippet, expect_tok, line_items, parse_multiname, token_errv,
};
use orchid_base::tree::{Paren, Token};
use orchid_extension::parser::{
PTokTree, ParsCtx, ParsedLine, ParsedLineKind, p_tree2gen, p_v2gen,
};
pub async fn parse_impls(
ctx: &ParsCtx<'_>,
lines: &mut Vec<ParsedLine>,
impls: &mut Vec<(Sym, Tok<String>)>,
body_tt: &PTokTree,
) -> OrcRes<()> {
let i = ctx.i().clone();
let body = match &body_tt.tok {
Token::S(Paren::Round, body) => line_items(ctx, Snippet::new(body_tt, body)).await,
Token::S(ptyp, _) =>
return Err(mk_errv(
i.i("Incorrect paren type").await,
format!("Expected () block, found {ptyp}"),
[body_tt.sr().pos()],
)),
_ =>
return Err(
token_errv(ctx, body_tt, "Expected body", |s| {
format!("Expected (impl ...) block, found {s}")
})
.await,
),
};
for Parsed { tail: line, output: comments } in body {
if let Ok(Parsed { tail, .. }) = expect_tok(ctx, line, i.i("impl").await).await {
let Parsed { tail, output: name_tt } = parse_multiname(ctx, tail).await?;
let (name, name_sr) = match name_tt.into_iter().at_most_one() {
Ok(None) => panic!("multiname is always at least one name"),
Ok(Some(ref n @ Import { name: Some(_), ref sr, .. })) =>
(n.clone().mspath().to_sym(&i).await, sr.clone()),
Ok(Some(Import { name: None, sr, .. })) =>
return Err(mk_errv(
i.i("impl line with globstar").await,
"::* is not permitted in a protocol impl",
[sr.pos()],
)),
Err(e) =>
return Err(mk_errv(
i.i("Impl line with multiple protocol names").await,
"::() is not permitted in a protocol impl",
e.map(|i| i.sr.pos()),
)),
};
let Parsed { tail, .. } = expect_tok(ctx, tail, i.i("as").await).await?;
let cnst_name = i.i(&format!("{}{}", lines.len(), name.iter().join("__"))).await;
lines.push(ParsedLine {
comments,
sr: line.sr(),
kind: ParsedLineKind::Rec(Vec::from_iter(chain![
[Token::Name(i.i("let").await).at(line.sr())],
[Token::Name(cnst_name.clone()).at(name_sr)],
[Token::Name(i.i("=").await).at(line.sr())],
tail.iter().cloned().map(p_tree2gen),
])),
});
impls.push((name, cnst_name));
} else {
lines.push(ParsedLine {
sr: line.sr(),
comments,
kind: ParsedLineKind::Rec(p_v2gen(line.to_vec())),
});
}
}
Ok(())
}
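// Sketch of the rewrite above for a hypothetical body line `impl std::conv::to_string as my_impl`
// (the first line of the block): it is turned into roughly `let 0std__conv__to_string = my_impl`,
// where the constant name is the running line index followed by the protocol path joined with
// "__", and (std::conv::to_string, "0std__conv__to_string") is recorded in `impls` so the caller
// can wire it into the generated Tag's impl table.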

View File

@@ -0,0 +1,77 @@
use std::rc::Rc;
use hashbrown::HashMap;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::parse::{Comment, Parsed, expect_end, try_pop_no_fluff};
use orchid_base::sym;
use orchid_base::tree::Token;
use orchid_extension::context::i;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
use crate::std::protocol::parse_impls::parse_impls;
use crate::std::protocol::types::Tag;
#[derive(Default)]
pub struct AsProtoParser;
impl Parser for AsProtoParser {
const LINE_HEAD: &'static str = "as_proto";
async fn parse<'a>(
pcx: ParsCtx<'a>,
exported: bool,
cmts: Vec<Comment>,
line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: body_tt, tail } = try_pop_no_fluff(&pcx, line).await?;
expect_end(&pcx, tail).await?;
if exported {
return Err(mk_errv(
i().i("Exported internal line").await,
"as_proto cannot be exported, the type shares the enclosing module's visibility",
[line.sr().pos()],
));
}
let mut lines = Vec::new();
let mut impls = Vec::new();
parse_impls(&pcx, &mut lines, &mut impls, body_tt).await?;
let id = pcx.module();
let proto_tag_name = i().i("__protocol_tag__").await;
let proto_tag_path = id.suffix([proto_tag_name.clone()], &i()).await;
lines.push(ParsedLine::cnst(&line.sr(), &cmts, true, proto_tag_name, async |_ccx| {
exec(async move |mut h| {
let mut new_impls = HashMap::new();
for (k, v) in impls {
new_impls.insert(k.clone(), h.register(sym_ref(id.suffix([v], &i()).await)).await);
}
Tag { id, impls: Rc::new(new_impls) }
})
.await
}));
lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("resolve").await, async move |_| {
call(sym_ref(sym!(std::protocol::resolve; i())), [sym_ref(proto_tag_path)])
}));
Ok(lines)
}
}
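// Sketch of the output above: inside a module M, `as_proto (impl ... as ...)` expands to the
// rewritten impl constants from parse_impls, a `__protocol_tag__` constant whose value is
// Tag { id: M, impls } with the impl expressions registered as handles, and a `resolve`
// constant defined as the call `std::protocol::resolve` applied to `M::__protocol_tag__`.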
#[derive(Default)]
pub struct ProtoParser;
impl Parser for ProtoParser {
const LINE_HEAD: &'static str = "proto";
async fn parse<'a>(
ctx: ParsCtx<'a>,
exported: bool,
cmts: Vec<Comment>,
line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: name_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
let Token::Name(name) = &name_tt.tok else {
return Err(mk_errv(i().i("missing name for type").await, "A type needs a name", [name_tt
.sr()
.pos()]));
};
let lines = AsProtoParser::parse(ctx, false, cmts.clone(), tail).await?;
Ok(vec![ParsedLine::module(&line.sr(), &cmts, exported, name, true, lines)])
}
}

View File

@@ -0,0 +1,82 @@
use std::rc::Rc;
use hashbrown::HashMap;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::parse::{Comment, Parsed, expect_end, try_pop_no_fluff};
use orchid_base::sym;
use orchid_base::tree::Token;
use orchid_extension::context::i;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
use crate::std::protocol::parse_impls::parse_impls;
use crate::std::protocol::types::Tag;
#[derive(Default)]
pub struct AsTypeParser;
impl Parser for AsTypeParser {
const LINE_HEAD: &'static str = "as_type";
async fn parse<'a>(
ctx: ParsCtx<'a>,
exported: bool,
cmts: Vec<Comment>,
line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: body_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
expect_end(&ctx, tail).await?;
if exported {
return Err(mk_errv(
i().i("Exported internal line").await,
"as_type cannot be exported, the type shares the enclosing module's visibility",
[line.sr().pos()],
));
}
let mut lines = Vec::new();
let mut impls = Vec::new();
parse_impls(&ctx, &mut lines, &mut impls, body_tt).await?;
let id = ctx.module();
let type_tag_name = i().i("__type_tag__").await;
let type_tag_path = id.suffix([type_tag_name.clone()], &i()).await;
lines.push(ParsedLine::cnst(&line.sr(), &cmts, true, type_tag_name, async |_ccx| {
exec(async move |mut h| {
let mut new_impls = HashMap::new();
for (k, v) in impls {
new_impls.insert(k.clone(), h.register(sym_ref(id.suffix([v], &i()).await)).await);
}
Tag { id, impls: Rc::new(new_impls) }
})
.await
}));
let type_tag_path_1 = type_tag_path.clone();
lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("wrap").await, async move |_ccx| {
call(sym_ref(sym!(std::protocol::wrap; i())), [sym_ref(type_tag_path_1)])
}));
let type_tag_path_1 = type_tag_path.clone();
lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("unwrap").await, async move |_ccx| {
call(sym_ref(sym!(std::protocol::unwrap; i())), [sym_ref(type_tag_path_1)])
}));
Ok(lines)
}
}
#[derive(Default)]
pub struct TypeParser;
impl Parser for TypeParser {
const LINE_HEAD: &'static str = "type";
async fn parse<'a>(
ctx: ParsCtx<'a>,
exported: bool,
cmts: Vec<Comment>,
line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: name_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
let Token::Name(name) = &name_tt.tok else {
return Err(mk_errv(i().i("missing name for type").await, "A type needs a name", [name_tt
.sr()
.pos()]));
};
let lines = AsTypeParser::parse(ctx, false, cmts.clone(), tail).await?;
Ok(vec![ParsedLine::module(&line.sr(), &cmts, exported, name, true, lines)])
}
}

View File

@@ -0,0 +1,141 @@
use std::borrow::Cow;
use std::rc::Rc;
use hashbrown::HashMap;
use never::Never;
use orchid_api_derive::Coding;
use orchid_api_traits::Request;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::fmt;
use orchid_base::name::Sym;
use orchid_extension::atom::{AtomMethod, Atomic, ForeignAtom, MethodSetBuilder, Supports, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::call;
use orchid_extension::tree::{GenMember, fun, prefix};
use crate::api;
#[derive(Clone, Debug)]
pub struct Tag {
pub id: Sym,
pub impls: Rc<HashMap<Sym, Expr>>,
}
impl Atomic for Tag {
type Data = api::TStrv;
type Variant = OwnedVariant;
fn reg_reqs() -> MethodSetBuilder<Self> { MethodSetBuilder::new().handle::<GetImplMethod>() }
}
impl OwnedAtom for Tag {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.id.to_api()) }
}
impl Supports<GetImplMethod> for Tag {
async fn handle(&self, req: GetImplMethod) -> <GetImplMethod as Request>::Response {
self.impls.get(&Sym::from_api(req.0, &i()).await).map(|expr| expr.handle().ticket())
}
}
#[derive(Clone, Debug, Coding)]
pub struct GetImplMethod(pub api::TStrv);
impl Request for GetImplMethod {
type Response = Option<api::ExprTicket>;
}
impl AtomMethod for GetImplMethod {
const NAME: &str = "std::protocol::get_impl";
}
#[derive(Clone, Debug, Coding)]
pub struct GetTagIdMethod;
impl Request for GetTagIdMethod {
type Response = api::TStrv;
}
impl AtomMethod for GetTagIdMethod {
const NAME: &str = "std::protocol::get_tag_id";
}
#[derive(Clone, Debug)]
pub struct Tagged {
pub tag: Tag,
pub value: Expr,
}
impl Atomic for Tagged {
type Data = api::TStrv;
type Variant = OwnedVariant;
}
impl OwnedAtom for Tagged {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.tag.id.to_api()) }
}
impl Supports<GetImplMethod> for Tagged {
async fn handle(&self, req: GetImplMethod) -> <GetImplMethod as Request>::Response {
self.tag.handle(req).await
}
}
pub async fn get_impl(receiver: ForeignAtom, proto: ForeignAtom) -> OrcRes<Expr> {
let Some(proto_id) = proto.request(GetTagIdMethod).await else {
return Err(mk_errv(i().i("Not a protocol").await, "Protocol does not have a tag ID", [
proto.pos()
]));
};
let Some(impl_val_opt) = receiver.request(GetImplMethod(proto_id)).await else {
return Err(mk_errv(
i().i("Receiver not tagged").await,
"The receiver does not have a type tag",
[receiver.pos()],
));
};
if let Some(impl_val) = impl_val_opt {
return Ok(Expr::deserialize(impl_val).await);
}
let Some(type_id) = receiver.request(GetTagIdMethod).await else {
return Err(mk_errv(
i().i("Incorrect protocols implementation in extension").await,
"Atom provides an impl table but no tag ID",
[receiver.pos()],
));
};
let Some(impl_val_opt) = proto.request(GetImplMethod(type_id)).await else {
return Err(mk_errv(
i().i("Incorrect protocols implementation in extension").await,
"Proto table atom provides a tag ID but no impl table",
[receiver.pos()],
));
};
if let Some(impl_val) = impl_val_opt {
return Ok(Expr::deserialize(impl_val).await);
}
return Err(mk_errv(
i().i("Implementation not found").await,
"This protocol is not implemented for this receiver",
[receiver.pos(), proto.pos()],
));
}
pub fn gen_protocol_lib() -> Vec<GenMember> {
prefix("std::protocol", [
fun(false, "resolve", async |tag: ForeignAtom, value: ForeignAtom| {
Ok(call(get_impl(value.clone(), tag).await?.to_gen().await, [value.to_gen().await]))
}),
fun(false, "wrap", async |tag: TAtom<Tag>, value: Expr| Tagged { tag: own(&tag).await, value }),
fun(false, "unwrap", async |tag: TAtom<Tag>, value: TAtom<Tagged>| {
let own_tag = own(&tag).await;
let own_val = own(&value).await;
if own_val.tag.id == own_tag.id {
Ok(own_val.value.to_gen().await)
} else {
Err(mk_errv(
i().i("Type mismatch").await,
format!(
"{} has type {}, expected {}",
fmt(&value, &i()).await,
own_val.tag.id,
own_tag.id
),
[value.pos()],
))
}
}),
])
}
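
The resolution order in get_impl above is two-sided: the tagged receiver is asked for an implementation keyed by the protocol's tag ID first, and only if its table has no entry is the protocol's own table queried with the receiver's type ID. A minimal sketch of that lookup order, using plain strings in place of Sym and integers in place of expression tickets (both stand-ins, not the extension API):

use std::collections::HashMap;

// Stand-ins for illustration: String replaces Sym, u64 replaces an expr ticket.
struct TagTable {
  id: String,
  impls: HashMap<String, u64>,
}

// Mirrors get_impl: receiver-side table first, protocol-side table second.
fn resolve_impl(receiver: &TagTable, proto: &TagTable) -> Option<u64> {
  if let Some(found) = receiver.impls.get(&proto.id) {
    return Some(*found);
  }
  proto.impls.get(&receiver.id).copied()
}

fn main() {
  let list_tag = TagTable { id: "std::list".into(), impls: HashMap::new() };
  let mut to_string = TagTable { id: "std::string::to_string".into(), impls: HashMap::new() };
  // The protocol carries the impl for lists; the list tag carries none.
  to_string.impls.insert("std::list".into(), 7);
  assert_eq!(resolve_impl(&list_tag, &to_string), Some(7));
}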

View File

@@ -0,0 +1,2 @@
pub mod record_atom;
pub mod record_lib;

View File

@@ -0,0 +1,39 @@
use std::borrow::Cow;
use std::pin::Pin;
use std::rc::Rc;
use futures::AsyncWrite;
use futures::future::join_all;
use hashbrown::HashMap;
use orchid_api_traits::Encode;
use orchid_base::interner::Tok;
use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::context::i;
use orchid_extension::expr::Expr;
use crate::api;
#[derive(Clone)]
pub struct Record(pub Rc<HashMap<Tok<String>, Expr>>);
impl Atomic for Record {
type Data = ();
type Variant = OwnedVariant;
}
impl OwnedAtom for Record {
type Refs = Vec<Expr>;
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
let (keys, values) =
self.0.iter().map(|(k, v)| (k.to_api(), v.clone())).unzip::<_, _, Vec<_>, Vec<_>>();
keys.encode(write).await;
values
}
async fn deserialize(mut dctx: impl DeserializeCtx, refs: Self::Refs) -> Self {
let keys =
join_all(dctx.decode::<Vec<api::TStr>>().await.iter().map(|t| async { i().ex(*t).await }))
.await;
Record(Rc::new(keys.into_iter().zip(refs).collect()))
}
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
}
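
Record serialization above splits the map into its interned keys (encoded into the byte stream) and its value expressions (returned as Refs so the host keeps them alive); deserialization zips the decoded keys back together with the refs. A rough sketch of that split-and-rejoin, with String keys and i32 values standing in for Tok<String> and Expr:

use std::collections::HashMap;

// Hypothetical stand-ins: String for interned keys, i32 for expression refs.
fn split(map: &HashMap<String, i32>) -> (Vec<String>, Vec<i32>) {
  map.iter().map(|(k, v)| (k.clone(), *v)).unzip()
}

fn rejoin(keys: Vec<String>, refs: Vec<i32>) -> HashMap<String, i32> {
  keys.into_iter().zip(refs).collect()
}

fn main() {
  let mut rec = HashMap::new();
  rec.insert("a".to_string(), 1);
  rec.insert("b".to_string(), 2);
  let (keys, refs) = split(&rec);
  assert_eq!(rejoin(keys, refs), rec); // keys and refs line up pairwise
}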

View File

@@ -0,0 +1,30 @@
use std::rc::Rc;
use hashbrown::HashMap;
use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own;
use orchid_extension::expr::Expr;
use orchid_extension::tree::{GenMember, cnst, fun, prefix};
use crate::std::option::OrcOpt;
use crate::std::record::record_atom::Record;
use crate::std::string::str_atom::IntStrAtom;
pub fn gen_record_lib() -> Vec<GenMember> {
prefix("std::record", [
cnst(true, "empty", Record(Rc::new(HashMap::new()))),
fun(true, "set", async |map: TAtom<Record>, key: IntStrAtom, val: Expr| {
let mut map = own(&map).await.0.as_ref().clone();
map.insert(key.0.clone(), val);
Record(Rc::new(map))
}),
fun(true, "get", async |map: TAtom<Record>, key: IntStrAtom| {
OrcOpt(own(&map).await.0.get(&key.0).cloned())
}),
fun(true, "delete", async |map: TAtom<Record>, key: IntStrAtom| {
let mut map = own(&map).await.0.as_ref().clone();
map.remove(&key.0);
Record(Rc::new(map))
}),
])
}
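
Every entry point in gen_record_lib clones the shared map before touching it, so std::record values behave as persistent data: set and delete hand back a new Record and leave the original untouched. A small sketch of that copy-on-write pattern, with String and i64 as placeholder key and value types:

use std::collections::HashMap;
use std::rc::Rc;

#[derive(Clone)]
struct Record(Rc<HashMap<String, i64>>);

// Clone the underlying map, apply the change, wrap the result in a fresh Rc.
fn set(rec: &Record, key: &str, val: i64) -> Record {
  let mut map = rec.0.as_ref().clone();
  map.insert(key.to_string(), val);
  Record(Rc::new(map))
}

fn main() {
  let empty = Record(Rc::new(HashMap::new()));
  let one = set(&empty, "x", 1);
  assert!(empty.0.is_empty());          // original record unchanged
  assert_eq!(one.0.get("x"), Some(&1)); // new record carries the binding
}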

View File

@@ -0,0 +1 @@
pub mod sym_atom;

View File

@@ -0,0 +1,67 @@
use std::borrow::Cow;
use orchid_api::TStrv;
use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use orchid_base::error::mk_errv;
use orchid_base::name::{NameLike, Sym};
use orchid_extension::atom::{Atomic, Supports, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::expr::{Expr, ExprHandle};
use orchid_extension::system::dep_req;
use orchid_extension::tree::{GenMember, fun, prefix};
use crate::std::std_system::StdReq;
use crate::std::string::str_atom::IntStrAtom;
use crate::std::string::to_string::ToStringMethod;
use crate::{HomoTpl, StdSystem, api};
#[derive(Clone, Coding)]
pub struct SymAtomData(pub api::TStrv);
#[derive(Clone)]
pub struct SymAtom(pub(crate) Sym);
impl Atomic for SymAtom {
type Data = SymAtomData;
type Variant = OwnedVariant;
}
impl OwnedAtom for SymAtom {
type Refs = ();
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(SymAtomData(self.0.tok().to_api())) }
}
impl Supports<ToStringMethod> for SymAtom {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.to_string()
}
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(StdReq)]
pub struct CreateSymAtom(pub TStrv);
impl Request for CreateSymAtom {
type Response = api::ExprTicket;
}
pub async fn sym_expr(sym: Sym) -> Expr {
Expr::from_handle(ExprHandle::deserialize(
dep_req::<StdSystem, _>(CreateSymAtom(sym.to_api())).await,
))
}
pub async fn gen_sym_lib() -> Vec<GenMember> {
prefix("std::refl::sym", [
fun(true, "from_str", async move |str: TAtom<IntStrAtom>| {
match Sym::parse(&i().ex(*str).await, &i()).await {
Ok(sym) => Ok(SymAtom(sym)),
Err(_) => Err(mk_errv(
i().i("Cannot parse sym from empty string").await,
"Empty string passed to std::refl::sym::from_str",
[str.pos()],
)),
}
}),
fun(true, "to_tpl", async move |sym: TAtom<SymAtom>| {
HomoTpl(own(&sym).await.0.segs().map(IntStrAtom).collect())
}),
])
}
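
std::refl::sym round-trips between a string form and a tuple of segments: from_str parses a ::-separated path, to_tpl splits a symbol back into its interned segments. A sketch of that segmentation, reduced to plain string handling (the real Sym::parse also interns each segment and, per the error branch above, rejects empty input):

// Illustrative only: a Sym is a non-empty sequence of ::-separated segments.
fn parse_segments(path: &str) -> Option<Vec<String>> {
  if path.is_empty() {
    return None; // mirrors the from_str error branch for unparsable input
  }
  Some(path.split("::").map(str::to_string).collect())
}

fn main() {
  assert_eq!(
    parse_segments("std::refl::sym"),
    Some(vec!["std".to_string(), "refl".to_string(), "sym".to_string()])
  );
  assert_eq!(parse_segments(""), None);
}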

View File

@@ -1,10 +1,15 @@
use never::Never; use std::rc::Rc;
use orchid_base::interner::Interner;
use futures::future::join_all;
use orchid_api_derive::{Coding, Hierarchy};
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::Receipt; use orchid_base::reqnot::Receipt;
use orchid_base::sym; use orchid_base::sym;
use orchid_extension::atom::{AtomDynfo, AtomicFeatures}; use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::entrypoint::ExtReq; use orchid_extension::entrypoint::ExtReq;
use orchid_extension::expr::Expr;
use orchid_extension::lexer::LexerObj; use orchid_extension::lexer::LexerObj;
use orchid_extension::parser::ParserObj; use orchid_extension::parser::ParserObj;
use orchid_extension::system::{System, SystemCard}; use orchid_extension::system::{System, SystemCard};
@@ -15,9 +20,25 @@ use super::number::num_lib::gen_num_lib;
use super::string::str_atom::{IntStrAtom, StrAtom}; use super::string::str_atom::{IntStrAtom, StrAtom};
use super::string::str_lib::gen_str_lib; use super::string::str_lib::gen_str_lib;
use crate::std::number::num_lexer::NumLexer; use crate::std::number::num_lexer::NumLexer;
use crate::std::option::{OptAtom, gen_option_lib};
use crate::std::protocol::proto_parser::{AsProtoParser, ProtoParser};
use crate::std::protocol::type_parser::{AsTypeParser, TypeParser};
use crate::std::protocol::types::{Tag, Tagged, gen_protocol_lib};
use crate::std::record::record_atom::Record;
use crate::std::record::record_lib::gen_record_lib;
use crate::std::reflection::sym_atom::{CreateSymAtom, SymAtom, gen_sym_lib};
use crate::std::string::str_lexer::StringLexer; use crate::std::string::str_lexer::StringLexer;
use crate::std::string::to_string::AsStrTag;
use crate::std::tuple::{CreateTuple, Tuple, TupleBuilder, gen_tuple_lib};
use crate::{Float, Int}; use crate::{Float, Int};
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable]
pub enum StdReq {
CreateTuple(CreateTuple),
CreateSymAtom(CreateSymAtom),
}
#[derive(Default)] #[derive(Default)]
pub struct StdSystem; pub struct StdSystem;
impl SystemCtor for StdSystem { impl SystemCtor for StdSystem {
@@ -29,15 +50,51 @@ impl SystemCtor for StdSystem {
} }
impl SystemCard for StdSystem { impl SystemCard for StdSystem {
type Ctor = Self; type Ctor = Self;
type Req = Never; type Req = StdReq;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> { fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> {
[Some(Int::dynfo()), Some(Float::dynfo()), Some(StrAtom::dynfo()), Some(IntStrAtom::dynfo())] [
Some(Int::dynfo()),
Some(Float::dynfo()),
Some(StrAtom::dynfo()),
Some(IntStrAtom::dynfo()),
Some(OptAtom::dynfo()),
Some(Record::dynfo()),
Some(Tuple::dynfo()),
Some(TupleBuilder::dynfo()),
Some(Tag::dynfo()),
Some(Tagged::dynfo()),
Some(AsStrTag::dynfo()),
]
} }
} }
impl System for StdSystem { impl System for StdSystem {
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} } async fn request(xreq: ExtReq<'_>, req: Self::Req) -> Receipt<'_> {
match req {
StdReq::CreateTuple(ref req @ CreateTuple(ref items)) => {
let tpl = Tuple(Rc::new(join_all(items.iter().copied().map(Expr::deserialize)).await));
let tk = tpl.to_expr().await.serialize().await;
xreq.handle(req, &tk).await
},
StdReq::CreateSymAtom(ref req @ CreateSymAtom(sym_tok)) => {
let sym_atom = SymAtom(Sym::from_api(sym_tok, &i()).await);
xreq.handle(req, &sym_atom.to_expr().await.serialize().await).await
},
}
}
fn lexers() -> Vec<LexerObj> { vec![&StringLexer, &NumLexer] } fn lexers() -> Vec<LexerObj> { vec![&StringLexer, &NumLexer] }
fn parsers() -> Vec<ParserObj> { vec![] } fn parsers() -> Vec<ParserObj> { vec![&AsTypeParser, &TypeParser, &AsProtoParser, &ProtoParser] }
fn env() -> Vec<GenMember> { merge_trivial([gen_num_lib(), gen_str_lib()]) } async fn env() -> Vec<GenMember> {
async fn prelude(i: &Interner) -> Vec<Sym> { vec![sym!(std; i).await] } merge_trivial([
gen_num_lib(),
gen_str_lib(),
gen_option_lib(),
gen_record_lib(),
gen_tuple_lib(),
gen_protocol_lib(),
gen_sym_lib().await,
])
}
async fn prelude() -> Vec<Sym> {
vec![sym!(std; i()), sym!(std::tuple; i()), sym!(std::option; i())]
}
} }

View File

@@ -1,3 +1,4 @@
pub mod str_atom; pub mod str_atom;
pub mod str_lexer; pub mod str_lexer;
pub mod str_lib; pub mod str_lib;
pub mod to_string;

View File

@@ -9,11 +9,13 @@ use orchid_api_traits::{Encode, Request};
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::{FmtCtx, FmtUnit}; use orchid_base::format::{FmtCtx, FmtUnit};
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_extension::atom::{AtomMethod, Atomic, MethodSetBuilder, Supports, TypAtom}; use orchid_extension::atom::{AtomMethod, Atomic, MethodSetBuilder, Supports, TAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant}; use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::context::i;
use orchid_extension::conv::TryFromExpr; use orchid_extension::conv::TryFromExpr;
use orchid_extension::expr::Expr; use orchid_extension::expr::Expr;
use orchid_extension::system::SysCtx;
use crate::std::string::to_string::ToStringMethod;
#[derive(Copy, Clone, Debug, Coding)] #[derive(Copy, Clone, Debug, Coding)]
pub struct StringGetVal; pub struct StringGetVal;
@@ -24,8 +26,11 @@ impl AtomMethod for StringGetVal {
const NAME: &str = "std::string_get_val"; const NAME: &str = "std::string_get_val";
} }
impl Supports<StringGetVal> for StrAtom { impl Supports<StringGetVal> for StrAtom {
async fn handle(&self, _: SysCtx, _: StringGetVal) -> <StringGetVal as Request>::Response { async fn handle(&self, _: StringGetVal) -> <StringGetVal as Request>::Response { self.0.clone() }
self.0.clone() }
impl Supports<ToStringMethod> for StrAtom {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.as_str().to_string()
} }
} }
@@ -46,7 +51,7 @@ impl Deref for StrAtom {
impl OwnedAtom for StrAtom { impl OwnedAtom for StrAtom {
type Refs = (); type Refs = ();
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn serialize(&self, _: SysCtx, sink: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs { async fn serialize(&self, sink: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.deref().encode(sink).await self.deref().encode(sink).await
} }
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
@@ -58,7 +63,7 @@ impl OwnedAtom for StrAtom {
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct IntStrAtom(Tok<String>); pub struct IntStrAtom(pub(crate) Tok<String>);
impl Atomic for IntStrAtom { impl Atomic for IntStrAtom {
type Variant = OwnedVariant; type Variant = OwnedVariant;
type Data = orchid_api::TStr; type Data = orchid_api::TStr;
@@ -72,30 +77,39 @@ impl OwnedAtom for IntStrAtom {
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
format!("{:?}i", *self.0).into() format!("{:?}i", *self.0).into()
} }
async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl AsyncWrite + ?Sized)>) { async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) {
self.0.encode(write).await self.0.encode(write).await
} }
async fn deserialize(mut ctx: impl DeserializeCtx, _: ()) -> Self { async fn deserialize(mut dctx: impl DeserializeCtx, _: ()) -> Self {
let s = ctx.decode::<String>().await; let s = dctx.decode::<String>().await;
Self(ctx.sys().i().i(&s).await) Self(i().i(&s).await)
}
}
impl TryFromExpr for IntStrAtom {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Ok(IntStrAtom(i().ex(TAtom::<IntStrAtom>::try_from_expr(expr).await?.value).await))
}
}
impl Supports<ToStringMethod> for IntStrAtom {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.as_str().to_string()
} }
} }
#[derive(Clone)] #[derive(Clone)]
pub struct OrcString { pub struct OrcString {
kind: OrcStringKind, kind: OrcStringKind,
ctx: SysCtx,
} }
#[derive(Clone)] #[derive(Clone)]
pub enum OrcStringKind { pub enum OrcStringKind {
Val(TypAtom<StrAtom>), Val(TAtom<StrAtom>),
Int(TypAtom<IntStrAtom>), Int(TAtom<IntStrAtom>),
} }
impl OrcString { impl OrcString {
pub async fn get_string(&self) -> Rc<String> { pub async fn get_string(&self) -> Rc<String> {
match &self.kind { match &self.kind {
OrcStringKind::Int(tok) => self.ctx.i().ex(**tok).await.rc(), OrcStringKind::Int(tok) => i().ex(**tok).await.rc(),
OrcStringKind::Val(atom) => atom.request(StringGetVal).await, OrcStringKind::Val(atom) => atom.request(StringGetVal).await,
} }
} }
@@ -103,13 +117,12 @@ impl OrcString {
impl TryFromExpr for OrcString { impl TryFromExpr for OrcString {
async fn try_from_expr(expr: Expr) -> OrcRes<OrcString> { async fn try_from_expr(expr: Expr) -> OrcRes<OrcString> {
if let Ok(v) = TypAtom::<StrAtom>::try_from_expr(expr.clone()).await { if let Ok(v) = TAtom::<StrAtom>::try_from_expr(expr.clone()).await {
return Ok(OrcString { ctx: expr.ctx(), kind: OrcStringKind::Val(v) }); return Ok(OrcString { kind: OrcStringKind::Val(v) });
} }
let ctx = expr.ctx(); match TAtom::<IntStrAtom>::try_from_expr(expr).await {
match TypAtom::<IntStrAtom>::try_from_expr(expr).await { Ok(t) => Ok(OrcString { kind: OrcStringKind::Int(t) }),
Ok(t) => Ok(OrcString { ctx: t.untyped.ctx().clone(), kind: OrcStringKind::Int(t) }), Err(e) => Err(mk_errv(i().i("A string was expected").await, "", e.pos_iter())),
Err(e) => Err(mk_errv(ctx.i().i("A string was expected").await, "", e.pos_iter())),
} }
} }
} }

View File

@@ -5,10 +5,12 @@ use orchid_base::location::SrcRange;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::ParseCtx; use orchid_base::parse::ParseCtx;
use orchid_base::sym; use orchid_base::sym;
use orchid_base::tree::wrap_tokv; use orchid_base::tree::{Paren, wrap_tokv};
use orchid_extension::context::i;
use orchid_extension::gen_expr::sym_ref;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable}; use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::parser::p_tree2gen; use orchid_extension::parser::p_tree2gen;
use orchid_extension::tree::{GenTokTree, ref_tok, x_tok}; use orchid_extension::tree::{GenTok, GenTokTree, ref_tok, x_tok};
use super::str_atom::IntStrAtom; use super::str_atom::IntStrAtom;
@@ -97,9 +99,9 @@ fn parse_string(str: &str) -> Result<String, StringError> {
pub struct StringLexer; pub struct StringLexer;
impl Lexer for StringLexer { impl Lexer for StringLexer {
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"', '`'..='`']; const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"', '`'..='`'];
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> { async fn lex<'a>(all: &'a str, lctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let Some(mut tail) = all.strip_prefix('"') else { let Some(mut tail) = all.strip_prefix('"') else {
return Err(err_not_applicable(ctx.ctx.i()).await); return Err(err_not_applicable().await);
}; };
let mut ret = None; let mut ret = None;
let mut cur = String::new(); let mut cur = String::new();
@@ -121,19 +123,27 @@ impl Lexer for StringLexer {
} }
let add_frag = |prev: Option<GenTokTree>, new: GenTokTree| async { let add_frag = |prev: Option<GenTokTree>, new: GenTokTree| async {
let Some(prev) = prev else { return new }; let Some(prev) = prev else { return new };
let concat_fn = ref_tok(sym!(std::string::concat; ctx.i()).await) let concat_fn = ref_tok(sym!(std::string::concat; lctx.i()))
.await .await
.at(SrcRange::zw(prev.sr.path(), prev.sr.start())); .at(SrcRange::zw(prev.sr.path(), prev.sr.start()));
wrap_tokv([concat_fn, prev, new]) wrap_tokv([concat_fn, prev, new])
}; };
loop { loop {
if let Some(rest) = tail.strip_prefix('"') { if let Some(rest) = tail.strip_prefix('"') {
return Ok((rest, add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, ctx).await).await)); return Ok((
rest,
add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, lctx).await).await,
));
} else if let Some(rest) = tail.strip_prefix('$') { } else if let Some(rest) = tail.strip_prefix('$') {
ret = Some(add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, ctx).await).await); ret = Some(add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, lctx).await).await);
let (new_tail, tree) = ctx.recurse(rest).await?; let (new_tail, tree) = lctx.recurse(rest).await?;
tail = new_tail; tail = new_tail;
ret = Some(add_frag(ret, p_tree2gen(tree)).await); // wrap the received token in a call to to_str
let to_str = sym_ref(sym!(std::string::to_str; i()));
let sr = tree.sr();
let inj_to_str_tok = GenTok::NewExpr(to_str).at(sr.map_range(|_| sr.start()..sr.start()));
let to_str_call = GenTok::S(Paren::Round, vec![inj_to_str_tok, p_tree2gen(tree)]).at(sr);
ret = Some(add_frag(ret, to_str_call).await);
} else if tail.starts_with('\\') { } else if tail.starts_with('\\') {
// parse_string will deal with it, we just have to skip the next char // parse_string will deal with it, we just have to skip the next char
tail = &tail[2..]; tail = &tail[2..];
@@ -143,11 +153,11 @@ impl Lexer for StringLexer {
cur.push(c); cur.push(c);
tail = ch.as_str(); tail = ch.as_str();
} else { } else {
let range = ctx.pos(all)..ctx.pos(""); let range = lctx.pos(all)..lctx.pos("");
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("No string end").await, lctx.i().i("No string end").await,
"String never terminated with \"", "String never terminated with \"",
[SrcRange::new(range.clone(), ctx.src())], [SrcRange::new(range.clone(), lctx.src())],
)); ));
} }
} }
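
The lexer change above threads every fragment through std::string::concat and wraps each interpolated expression in a call to std::string::to_str before concatenation, so a template such as "a$x!" lexes to nested concat calls around to_str x. A sketch of that folding, with plain strings standing in for the generated token trees:

// Stand-in for GenTokTree: either a literal fragment or an interpolated expression.
enum Frag {
  Lit(String),
  Interp(String), // name of the spliced expression
}

// Mirrors add_frag: the first fragment stands alone, later ones are joined with concat.
fn fold_template(frags: Vec<Frag>) -> String {
  let mut acc: Option<String> = None;
  for frag in frags {
    let piece = match frag {
      Frag::Lit(s) => format!("{:?}", s),
      Frag::Interp(name) => format!("(to_str {})", name),
    };
    acc = Some(match acc {
      None => piece,
      Some(prev) => format!("(concat {} {})", prev, piece),
    });
  }
  acc.unwrap_or_else(|| "\"\"".to_string())
}

fn main() {
  let out = fold_template(vec![
    Frag::Lit("a".into()),
    Frag::Interp("x".into()),
    Frag::Lit("!".into()),
  ]);
  assert_eq!(out, r#"(concat (concat "a" (to_str x)) "!")"#);
}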

View File

@@ -1,15 +1,60 @@
use std::rc::Rc; use std::rc::Rc;
use orchid_extension::tree::{GenMember, comments, fun, prefix}; use orchid_base::format::fmt;
use orchid_base::sym;
use orchid_extension::atom::ForeignAtom;
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::tree::{GenMember, cnst, comments, fun, prefix};
use super::str_atom::StrAtom; use super::str_atom::StrAtom;
use crate::OrcString; use crate::OrcString;
use crate::std::protocol::types::get_impl;
use crate::std::string::to_string::{AsStrTag, ToStringMethod};
pub fn gen_str_lib() -> Vec<GenMember> { pub fn gen_str_lib() -> Vec<GenMember> {
prefix("std::string", [comments( prefix("std::string", [
comments(
["Concatenate two strings"], ["Concatenate two strings"],
fun(true, "concat", |left: OrcString, right: OrcString| async move { fun(true, "concat", async |left: OrcString, right: OrcString| {
StrAtom::new(Rc::new(left.get_string().await.to_string() + &right.get_string().await)) StrAtom::new(Rc::new(left.get_string().await.to_string() + &right.get_string().await))
}), }),
)]) ),
comments(
["Converts a value to string. This function is used in interpolation. \
It supports the std::string::to_string protocol in Orchid, \
the std::string::to_string request in Rust, \
and expression debug printing as a fallback (print_atom for Atomic implementors in Rust).\n\n\
This function is infallible."],
fun(true, "to_str", async |input: Expr| {
exec(async move |mut h| {
if let Ok(atom) = h.exec::<ForeignAtom>(input.clone()).await {
if let Some(str) = atom.request(ToStringMethod).await {
return StrAtom::new(Rc::new(str)).to_gen().await;
}
let proto_ref = sym_ref(sym!(std::string::to_string::__protocol_tag__; i()));
let proto = h.exec(proto_ref).await.expect("This protocol is defined in this system");
if let Ok(cb) = get_impl(atom.clone(), proto).await {
return call(cb.to_gen().await, [atom.to_gen().await]).to_gen().await;
}
}
return StrAtom::new(Rc::new(fmt(&input, &i()).await)).to_gen().await;
})
.await
}),
),
prefix("to_string", [
cnst(true, "__type_tag__", AsStrTag),
fun(true, "resolve", async |atom: ForeignAtom| {
exec(async |mut h| {
let proto = h.exec(sym_ref(sym!(std::string::to_string; i()))).await?;
Ok(call(get_impl(atom.clone(), proto).await?.to_gen().await, [atom.to_gen().await]))
})
.await
}),
]),
])
} }
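
to_str above tries three strategies in order: a direct ToStringMethod request to the atom, then a lookup through the std::string::to_string protocol, and finally formatter output as the infallible fallback. A compact sketch of that chain, with closures standing in for the request, the protocol lookup, and the debug printer:

// Each stage is optional except the last; the first one that answers wins.
fn to_str(
  request_to_string: impl Fn() -> Option<String>,
  protocol_impl: impl Fn() -> Option<String>,
  debug_print: impl Fn() -> String,
) -> String {
  request_to_string().or_else(protocol_impl).unwrap_or_else(debug_print)
}

fn main() {
  // An atom that answers the request directly short-circuits the chain.
  assert_eq!(to_str(|| Some("42".into()), || None, || "<atom>".into()), "42");
  // With neither a request handler nor a protocol impl, the printer runs.
  assert_eq!(to_str(|| None, || None, || "<atom>".into()), "<atom>");
}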

View File

@@ -0,0 +1,36 @@
use orchid_api_derive::Coding;
use orchid_api_traits::Request;
use orchid_base::name::Sym;
use orchid_extension::atom::{AtomMethod, Atomic, MethodSetBuilder, Supports};
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::context::i;
use crate::std::protocol::types::{GetImplMethod, GetTagIdMethod};
#[derive(Coding, Clone, Debug)]
pub struct AsStrTag;
impl Atomic for AsStrTag {
type Data = AsStrTag;
type Variant = ThinVariant;
fn reg_reqs() -> MethodSetBuilder<Self> {
MethodSetBuilder::new().handle::<GetTagIdMethod>().handle::<GetImplMethod>()
}
}
impl ThinAtom for AsStrTag {}
impl Supports<GetTagIdMethod> for AsStrTag {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::parse("std::string::to_string", &i()).await.unwrap().to_api()
}
}
impl Supports<GetImplMethod> for AsStrTag {
async fn handle(&self, _: GetImplMethod) -> <GetImplMethod as Request>::Response { None }
}
#[derive(Coding, Clone, Debug)]
pub struct ToStringMethod;
impl Request for ToStringMethod {
type Response = String;
}
impl AtomMethod for ToStringMethod {
const NAME: &str = "std::string::to_string";
}

211
orchid-std/src/std/tuple.rs Normal file
View File

@@ -0,0 +1,211 @@
use std::borrow::Cow;
use std::num::NonZero;
use std::pin::Pin;
use std::rc::Rc;
use futures::AsyncWrite;
use futures::future::join_all;
use never::Never;
use orchid_api::ExprTicket;
use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_extension::atom::{Atomic, TAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::expr::{Expr, ExprHandle};
use orchid_extension::gen_expr::GExpr;
use orchid_extension::system::dep_req;
use orchid_extension::tree::{GenMember, cnst, fun, prefix};
use crate::std::std_system::StdReq;
use crate::{Int, StdSystem, api};
#[derive(Clone)]
pub struct Tuple(pub(super) Rc<Vec<Expr>>);
impl Atomic for Tuple {
type Data = Vec<ExprTicket>;
type Variant = OwnedVariant;
}
impl OwnedAtom for Tuple {
type Refs = Vec<Expr>;
async fn val(&self) -> Cow<'_, Self::Data> {
Cow::Owned(self.0.iter().map(|x| x.handle().ticket()).collect())
}
async fn serialize(&self, _: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.0.as_ref().clone()
}
async fn deserialize(_: impl DeserializeCtx, refs: Self::Refs) -> Self { Self(Rc::new(refs)) }
async fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
Variants::default()
.sequence(self.0.len(), "t[", ", ", "]", Some(true))
.sequence(self.0.len(), "t[\n", ",\n", "\n]", Some(true))
.units_own(join_all(self.0.iter().map(|x| x.print(c))).await)
}
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(StdReq)]
pub struct CreateTuple(pub Vec<api::ExprTicket>);
impl Request for CreateTuple {
type Response = api::ExprTicket;
}
#[derive(Clone)]
pub struct TupleBuilder {
arity: NonZero<u32>,
items: Vec<Expr>,
}
impl Atomic for TupleBuilder {
type Data = ();
type Variant = OwnedVariant;
}
impl OwnedAtom for TupleBuilder {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn call(mut self, arg: Expr) -> GExpr {
self.items.push(arg);
if self.arity.get() == self.items.len().try_into().expect("counting up from 0") {
Tuple(Rc::new(self.items)).to_gen().await
} else {
self.to_gen().await
}
}
}
pub fn gen_tuple_lib() -> Vec<GenMember> {
prefix("std::tuple", [
cnst(true, "empty", Tuple(Rc::new(Vec::new()))),
fun(true, "one", async |item: Expr| Tuple(Rc::new(vec![item]))),
fun(true, "new", async |arity: TAtom<Int>| {
if let Ok(arity) = u32::try_from(arity.value.0).and_then(|v| v.try_into()) {
TupleBuilder { arity, items: Vec::new() }.to_gen().await
} else {
Tuple(Rc::new(Vec::new())).to_gen().await
}
}),
fun(true, "get", async |tup: TAtom<Tuple>, idx: TAtom<Int>| {
if let Ok(idx) = usize::try_from(idx.0)
&& let Some(val) = own(&tup).await.0.get(idx)
{
return Ok(val.clone());
}
return Err(mk_errv(
i().i("Tuple index out of bounds").await,
format!("{} is out of bounds for Tuple{}", idx.0, tup.len()),
[idx.pos()],
));
}),
fun(true, "set", async |tup: TAtom<Tuple>, idx: TAtom<Int>, val: Expr| {
if let Ok(idx) = usize::try_from(idx.0) {
let mut new_vec = own(&tup).await.0.to_vec();
if let Some(slot) = new_vec.get_mut(idx) {
*slot = val;
return Ok(Tuple(Rc::new(new_vec)));
}
}
return Err(mk_errv(
i().i("Tuple index out of bounds").await,
format!("{} is out of bounds for Tuple{}", idx.0, tup.len()),
[idx.pos()],
));
}),
fun(true, "len", async |tup: TAtom<Tuple>| {
Int(tup.len().try_into().expect("Tuple was created with an Int length"))
}),
fun(true, "cat", async |left: TAtom<Tuple>, right: TAtom<Tuple>| {
Tuple(Rc::new(own(&left).await.0.iter().chain(own(&right).await.0.iter()).cloned().collect()))
}),
])
}
pub struct UntypedTuple(pub Vec<Expr>);
impl TryFromExpr for UntypedTuple {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
let tpl = TAtom::<Tuple>::try_from_expr(expr.clone()).await?;
let exprs =
join_all(tpl.iter().map(async |t| Expr::from_handle(ExprHandle::from_ticket(*t).await)))
.await;
Ok(UntypedTuple(exprs))
}
}
impl ToExpr for UntypedTuple {
async fn to_gen(self) -> GExpr {
let exprs = join_all(self.0.into_iter().map(async |expr| expr.serialize().await)).await;
Expr::deserialize(dep_req::<StdSystem, _>(CreateTuple(exprs)).await).await.to_gen().await
}
}
pub struct Tpl<T>(pub T);
mod tpl_impls {
use itertools::Itertools;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_extension::context::i;
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::GExpr;
use super::{Tpl, UntypedTuple};
macro_rules! tpl_derives {
($len:literal $($t:ident)*) => {
pastey::paste! {
impl<$( $t: TryFromExpr, )*> TryFromExpr for Tpl<($( $t, )*)> {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
let tpl = UntypedTuple::try_from_expr(expr.clone()).await?;
let Some([$( [< $t:lower >], )*]) = tpl.0.iter().cloned().collect_array() else {
return Err(mk_errv(
i().i("Tuple arity mismatch").await,
format!("Expected a {}-ary tuple, found {}-ary", $len, tpl.0.len()),
[expr.data().await.pos.clone()]
));
};
Ok(Tpl(( $( $t::try_from_expr([< $t:lower >]).await?, )* )))
}
}
impl<$( $t: ToExpr, )*> ToExpr for Tpl<($( $t, )*)> {
async fn to_gen(self) -> GExpr {
let Self(($( [< $t:lower >], )*)) = self;
UntypedTuple(vec![
$( [< $t:lower >].to_expr().await, )*
]).to_gen().await
}
}
}
};
}
tpl_derives!(0);
tpl_derives!(1 A);
tpl_derives!(2 A B);
tpl_derives!(3 A B C);
tpl_derives!(4 A B C D);
tpl_derives!(5 A B C D E);
tpl_derives!(6 A B C D E F);
tpl_derives!(7 A B C D E F G);
tpl_derives!(8 A B C D E F G H);
tpl_derives!(9 A B C D E F G H I);
tpl_derives!(10 A B C D E F G H I J);
}
pub struct HomoTpl<T>(pub Vec<T>);
impl<T: TryFromExpr> TryFromExpr for HomoTpl<T> {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
let tpl = TAtom::<Tuple>::try_from_expr(expr.clone()).await?;
let mut res = Vec::new();
for item in tpl.iter() {
res.push(T::try_from_expr(Expr::from_handle(ExprHandle::from_ticket(*item).await)).await?);
}
Ok(HomoTpl(res))
}
}
impl<T: ToExpr> ToExpr for HomoTpl<T> {
async fn to_gen(self) -> GExpr {
UntypedTuple(join_all(self.0.into_iter().map(async |t| t.to_expr().await)).await).to_gen().await
}
}
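
std::tuple::new above returns a TupleBuilder that keeps accepting arguments through call until the declared arity is reached, at which point it collapses into a finished Tuple. A standalone sketch of that accumulate-until-arity pattern (the real builder returns generated expressions rather than an enum):

// Either still collecting arguments, or done.
enum Step {
  Building(Builder),
  Done(Vec<i64>),
}

struct Builder {
  arity: usize,
  items: Vec<i64>,
}

impl Builder {
  // Mirrors TupleBuilder::call: push one argument, finish once arity is met.
  fn call(mut self, arg: i64) -> Step {
    self.items.push(arg);
    if self.items.len() == self.arity { Step::Done(self.items) } else { Step::Building(self) }
  }
}

fn main() {
  let b = Builder { arity: 2, items: Vec::new() };
  let Step::Building(b) = b.call(1) else { panic!("arity not reached yet") };
  let Step::Done(items) = b.call(2) else { panic!("arity reached") };
  assert_eq!(items, vec![1, 2]);
}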

View File

@@ -1 +0,0 @@
target

828
orchidlang/Cargo.lock generated
View File

@@ -1,828 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "ahash"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d713b3834d76b85304d4d525563c1276e2e30dc97cc67bfb4585a4a29fc2c89f"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
dependencies = [
"memchr",
]
[[package]]
name = "allocator-api2"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
[[package]]
name = "anstream"
version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96b09b5178381e0874812a9b157f7fe84982617e48f71f4e3235482775e5b540"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
[[package]]
name = "anstyle-parse"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
dependencies = [
"windows-sys",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
dependencies = [
"anstyle",
"windows-sys",
]
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi 0.3.9",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "bound"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6021ae095f16f54aaae093f4c723700430e71eab731d3b0a07fc8fe258fd5110"
[[package]]
name = "bstr"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc"
dependencies = [
"memchr",
"serde",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "4.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da"
dependencies = [
"clap_builder",
"clap_derive",
]
[[package]]
name = "clap_builder"
version = "4.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.50",
]
[[package]]
name = "clap_lex"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce"
[[package]]
name = "colorchoice"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "const_format"
version = "0.2.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673"
dependencies = [
"const_format_proc_macros",
]
[[package]]
name = "const_format_proc_macros"
version = "0.2.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
]
[[package]]
name = "cpufeatures"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
dependencies = [
"libc",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
]
[[package]]
name = "dyn-clone"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d"
[[package]]
name = "either"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a"
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "globset"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
dependencies = [
"aho-corasick",
"bstr",
"log",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "hashbrown"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
dependencies = [
"ahash",
"allocator-api2",
]
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
[[package]]
name = "intern-all"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20c9bf7d7b0572f7b4398fddc93ac1a200a92d1ba319a27dac04649b2223c0f6"
dependencies = [
"hashbrown",
"lazy_static",
"trait-set",
]
[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
dependencies = [
"either",
]
[[package]]
name = "kernel32-sys"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.153"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]]
name = "log"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
[[package]]
name = "memchr"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
[[package]]
name = "never"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c96aba5aa877601bb3f6dd6a63a969e1f82e60646e81e71b14496995e9853c91"
[[package]]
name = "num-traits"
version = "0.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
dependencies = [
"autocfg",
]
[[package]]
name = "numtoa"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8f8bdf33df195859076e54ab11ee78a1b208382d3a26ec40d142ffc1ecc49ef"
[[package]]
name = "once_cell"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "orchidlang"
version = "0.3.0"
dependencies = [
"bound",
"clap",
"const_format",
"dyn-clone",
"hashbrown",
"intern-all",
"itertools",
"never",
"once_cell",
"ordered-float",
"paste",
"rayon",
"rust-embed",
"substack",
"take_mut",
"termsize",
"trait-set",
"unicode-segmentation",
]
[[package]]
name = "ordered-float"
version = "4.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a76df7075c7d4d01fdcb46c912dd17fba5b60c78ea480b475f2b6ab6f666584e"
dependencies = [
"num-traits",
]
[[package]]
name = "paste"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
[[package]]
name = "proc-macro2"
version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rayon"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa7237101a77a10773db45d62004a272517633fbcc3df19d96455ede1122e051"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags",
]
[[package]]
name = "redox_termios"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20145670ba436b55d91fc92d25e71160fbfbdd57831631c8d7d36377a476f1cb"
[[package]]
name = "regex-automata"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "rust-embed"
version = "8.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a82c0bbc10308ed323529fd3c1dce8badda635aa319a5ff0e6466f33b8101e3f"
dependencies = [
"rust-embed-impl",
"rust-embed-utils",
"walkdir",
]
[[package]]
name = "rust-embed-impl"
version = "8.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6227c01b1783cdfee1bcf844eb44594cd16ec71c35305bf1c9fb5aade2735e16"
dependencies = [
"proc-macro2",
"quote",
"rust-embed-utils",
"syn 2.0.50",
"walkdir",
]
[[package]]
name = "rust-embed-utils"
version = "8.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cb0a25bfbb2d4b4402179c2cf030387d9990857ce08a32592c6238db9fa8665"
dependencies = [
"globset",
"sha2",
"walkdir",
]
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "serde"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.50",
]
[[package]]
name = "sha2"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "strsim"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01"
[[package]]
name = "substack"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffccc3d80f0a489de67aa74ff31ab852abb973e1c6dacf3704889e00ca544e7f"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "take_mut"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
[[package]]
name = "termion"
version = "1.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "077185e2eac69c3f8379a4298e1e07cd36beb962290d4a51199acf0fdc10607e"
dependencies = [
"libc",
"numtoa",
"redox_syscall",
"redox_termios",
]
[[package]]
name = "termsize"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e86d824a8e90f342ad3ef4bd51ef7119a9b681b0cc9f8ee7b2852f02ccd2517"
dependencies = [
"atty",
"kernel32-sys",
"libc",
"termion",
"winapi 0.2.8",
]
[[package]]
name = "trait-set"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b79e2e9c9ab44c6d7c20d5976961b47e8f49ac199154daa514b77cd1ab536625"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "typenum"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-segmentation"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "utf8parse"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "walkdir"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
dependencies = [
"same-file",
"winapi-util",
]
[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-build"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d380ba1dc7187569a8a9e91ed34b8ccfc33123bbacb8c0aed2d1ad7f3ef2dc5f"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68e5dcfb9413f53afd9c8f86e56a7b4d86d9a2fa26090ea2dc9e40fba56c6ec6"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8dab469ebbc45798319e69eebf92308e541ce46760b49b18c6b3fe5e8965b30f"
[[package]]
name = "windows_i686_gnu"
version = "0.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a4e9b6a7cac734a8b4138a4e1044eac3404d8326b6c0f939276560687a033fb"
[[package]]
name = "windows_i686_msvc"
version = "0.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28b0ec9c422ca95ff34a78755cfa6ad4a51371da2a5ace67500cf7ca5f232c58"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "704131571ba93e89d7cd43482277d6632589b18ecf4468f591fbae0a8b101614"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42079295511643151e98d61c38c0acc444e52dd42ab456f7ccfd5152e8ecf21c"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0770833d60a970638e989b3fa9fd2bb1aaadcf88963d1659fd7d9990196ed2d6"
[[package]]
name = "zerocopy"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.50",
]

View File

@@ -1,41 +0,0 @@
[package]
name = "orchidlang"
version = "0.3.0"
edition = "2024"
license = "GPL-3.0"
repository = "https://github.com/lbfalvy/orchid"
description = """
An embeddable pure functional scripting language
"""
authors = ["Lawrence Bethlenfalvy <lbfalvy@protonmail.com>"]
[lib]
path = "src/lib.rs"
[[bin]]
name = "orcx"
path = "src/bin/orcx.rs"
doc = false
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hashbrown = "0.14"
ordered-float = "4.2"
itertools = "0.12"
dyn-clone = "1.0"
trait-set = "0.3"
paste = "1.0"
rust-embed = { version = "8.2", features = ["include-exclude"] }
take_mut = "0.2"
unicode-segmentation = "1.11"
never = "0.1"
substack = "1.1"
intern-all = "0.4.1"
once_cell = "1.19"
const_format = "0.2"
bound = "0.5"
# Dependencies of orcx
clap = { version = "4.5", features = ["derive"] }
rayon = "1.8"
termsize = "0.1"

View File

@@ -1,2 +0,0 @@
mod prompt;
pub use prompt::cmd_prompt;

View File

@@ -1,11 +0,0 @@
use std::io::{self, Error, Write};
pub fn cmd_prompt(prompt: &str) -> Result<(String, Vec<String>), Error> {
print!("{}", prompt);
io::stdout().flush()?;
let mut cmdln = String::new();
io::stdin().read_line(&mut cmdln)?;
let mut segments = cmdln.split(' ');
let cmd = if let Some(cmd) = segments.next() { cmd } else { "" };
Ok((cmd.to_string(), segments.map(str::to_string).collect()))
}

View File

@@ -1,74 +0,0 @@
use itertools::Itertools;
use orchidlang::error::Reporter;
use orchidlang::facade::macro_runner::MacroRunner;
use orchidlang::libs::std::exit_status::OrcExitStatus;
use orchidlang::location::{CodeGenInfo, CodeLocation};
use orchidlang::name::Sym;
use orchidlang::pipeline::project::{ItemKind, ProjItem, ProjectTree};
use orchidlang::sym;
use crate::cli::cmd_prompt;
/// A little utility to step through the reproject of a macro set
pub fn main(tree: ProjectTree, symbol: Sym) -> OrcExitStatus {
print!("Macro debugger starting on {symbol}");
let location = CodeLocation::new_gen(CodeGenInfo::no_details(sym!(orcx::macro_runner)));
let expr_ent = match tree.0.walk1_ref(&[], &symbol[..], |_| true) {
Ok((e, _)) => e.clone(),
Err(e) => {
eprintln!("{}", e.at(&location.origin()));
return OrcExitStatus::Failure;
},
};
let mut expr = match expr_ent.item() {
Some(ProjItem { kind: ItemKind::Const(c) }) => c.clone(),
_ => {
eprintln!("macro-debug argument must be a constant");
return OrcExitStatus::Failure;
},
};
let reporter = Reporter::new();
let macro_runner = MacroRunner::new(&tree, None, &reporter);
reporter.assert_exit();
println!("\nInitial state: {expr}");
// print_for_debug(&code);
let mut steps = macro_runner.step(expr.clone()).enumerate();
loop {
let (cmd, _) = cmd_prompt("\ncmd> ").unwrap();
match cmd.trim() {
"" | "n" | "next" => match steps.next() {
None => print!("Halted"),
Some((idx, c)) => {
expr = c;
print!("Step {idx}: {expr}");
},
},
"p" | "print" => {
let glossary = expr.value.collect_names();
let gl_str = glossary.iter().join(", ");
print!("code: {expr}\nglossary: {gl_str}")
},
"d" | "dump" => print!("Rules: {}", macro_runner.repo),
"q" | "quit" => return OrcExitStatus::Success,
"complete" => {
match steps.last() {
Some((idx, c)) => print!("Step {idx}: {c}"),
None => print!("Already halted"),
}
return OrcExitStatus::Success;
},
"h" | "help" => print!(
"Available commands:
\t<blank>, n, next\t\ttake a step
\tp, print\t\tprint the current state
\td, dump\t\tprint the rule table
\tq, quit\t\texit
\th, help\t\tprint this text"
),
_ => {
print!("unrecognized command \"{}\", try \"help\"", cmd);
continue;
},
}
}
}

View File

@@ -1,4 +0,0 @@
pub mod macro_debug;
pub mod print_project;
pub mod shared;
pub mod tests;

Some files were not shown because too many files have changed in this diff.