forked from Orchid/orchid
New macro system and stdlib additions
Cargo.lock (generated, 18 changes)
@@ -1061,6 +1061,7 @@ dependencies = [
 "pastey",
 "some_executor",
 "substack",
 "task-local",
 "tokio",
 "tokio-util",
 "trait-set",
@@ -1110,7 +1111,9 @@ dependencies = [
 "orchid-base",
 "orchid-extension",
 "ordered-float",
 "pastey",
 "rust_decimal",
 "subslice-offset",
 "substack",
 "test_executors",
 "tokio",
@@ -1632,6 +1635,12 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"

[[package]]
name = "subslice-offset"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c883fb2521558a8be70f0f1922babf736f9f72dfbe6ae4f397de3aefb74627ec"

[[package]]
name = "substack"
version = "1.1.1"
@@ -1677,6 +1686,15 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"

[[package]]
name = "task-local"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2c821daee0efdf6414970c8185a1c22e259a7ed87b2fd9f7d3c5f5503fd2863"
dependencies = [
 "pin-project-lite",
]

[[package]]
name = "test_executors"
version = "0.3.5"
LICENSE (674 deletions)
@@ -1,674 +0,0 @@
[Full text of the GNU General Public License, Version 3, 29 June 2007 (674 lines), removed by this commit.]
@@ -1,3 +1,2 @@
import macros::common::(+ *)

let main = 1 + 2
let my_tuple = option::some t[1, 2]
let main = tuple::get (option::expect my_tuple "tuple is none") 1
@@ -4,7 +4,7 @@ use std::future::Future;
use super::coding::Coding;
use crate::helpers::enc_vec;

pub trait Request: fmt::Debug + Coding + Sized + 'static {
pub trait Request: fmt::Debug + Sized + 'static {
  type Response: fmt::Debug + Coding + 'static;
}
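For illustration, a hedged sketch of a request/response pair under the new bounds. Ping and Pong are hypothetical types, not part of this commit; Coding is the derive used in the other hunks. The Coding requirement now sits on the response type (and is re-stated on more specific traits such as AtomMethod further down) rather than on Request itself.

// Hypothetical request pair, for illustration only.
#[derive(Clone, Debug, Coding)]
pub struct Ping(pub u64);

#[derive(Clone, Debug, Coding)]
pub struct Pong(pub u64);

// Ping now only needs Debug + Sized + 'static;
// its Response still has to implement Coding.
impl Request for Ping {
  type Response = Pong;
}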
@@ -145,16 +145,6 @@ impl Request for ExtAtomPrint {
  type Response = FormattingUnit;
}

/// Can specify the recipient of an atom as well. The main use case for this is
/// to be able to return an atom to other extensions, so it can be combined with
/// a [crate::Move].
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)]
pub struct CreateAtom(pub Atom, pub SysId);
impl Request for CreateAtom {
  type Response = ExprTicket;
}

/// Requests that apply to an existing atom instance
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
@@ -43,17 +43,6 @@ pub struct Acquire(pub SysId, pub ExprTicket);
#[extends(ExprNotif, ExtHostNotif)]
pub struct Release(pub SysId, pub ExprTicket);

/// Decrement the reference count for one system and increment it for another,
/// to indicate passing an owned reference. Equivalent to [Acquire] followed by
/// [Release].
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(ExprNotif, ExtHostNotif)]
pub struct Move {
  pub dec: SysId,
  pub inc: SysId,
  pub expr: ExprTicket,
}

/// A description of a new expression. It is used as the return value of
/// [crate::atom::Call] or [crate::atom::CallRef], or a constant in the
/// [crate::tree::Tree].
@@ -67,8 +56,9 @@ pub enum ExpressionKind {
  /// template
  Arg(u64),
  /// Insert the specified host-expression in the template here. When the clause
  /// is used in the const tree, this variant is forbidden.
  Slot { tk: ExprTicket, by_value: bool },
  /// is used in the const tree, this variant is forbidden. The ticket held
  /// within is always owning. To avoid a leak, it must be deserialized.
  Slot(ExprTicket),
  /// The lhs must be fully processed before the rhs can be processed.
  /// Equivalent to Haskell's function of the same name
  Seq(Box<Expression>, Box<Expression>),
@@ -115,11 +105,12 @@ impl Request for Inspect {
  type Response = Inspected;
}

#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)]
#[extendable]
pub enum ExprReq {
  Inspect(Inspect),
  Create(Create),
}

#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
@@ -128,5 +119,11 @@ pub enum ExprReq {
pub enum ExprNotif {
  Acquire(Acquire),
  Release(Release),
  Move(Move),
}

#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExprReq, ExtHostReq)]
pub struct Create(pub Expression);
impl Request for Create {
  type Response = ExprTicket;
}
@@ -17,6 +17,8 @@ pub enum Location {
  Gen(CodeGenInfo),
  /// Range and file
  SourceRange(SourceRange),
  /// Multiple locations
  Multi(Vec<Location>),
}

#[derive(Clone, Debug, Coding)]
@@ -85,7 +85,6 @@ pub enum ExtHostReq {
  IntReq(interner::IntReq),
  Fwd(atom::Fwd),
  ExtAtomPrint(atom::ExtAtomPrint),
  CreateAtom(atom::CreateAtom),
  SysFwd(system::SysFwd),
  ExprReq(expr::ExprReq),
  SubLex(lexer::SubLex),
@@ -1,4 +1,5 @@
use std::collections::HashMap;
use std::fmt;
use std::num::NonZeroU64;
use std::ops::Range;
use std::rc::Rc;
@@ -56,6 +57,15 @@ pub enum Paren {
  Square,
  Curly,
}
impl fmt::Display for Paren {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "{}", match self {
      Self::Round => "()",
      Self::Curly => "{}",
      Self::Square => "[]",
    })
  }
}

#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct TreeId(pub NonZeroU64);
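A trivial usage note for the new Display impl above, as a sketch (assuming Paren is in scope):

assert_eq!(Paren::Round.to_string(), "()");
assert_eq!(Paren::Square.to_string(), "[]");
assert_eq!(Paren::Curly.to_string(), "{}");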
@@ -7,7 +7,7 @@ use std::rc::Rc;
use std::str::FromStr;

use futures::future::join_all;
use itertools::Itertools;
use itertools::{Itertools, chain};
use never::Never;
use regex::Regex;

@@ -47,12 +47,14 @@ impl FmtUnit {
    }
  }
  pub fn sequence(
    head: &str,
    delim: &str,
    tail: &str,
    seq_bnd: Option<bool>,
    seq: impl IntoIterator<Item = FmtUnit>,
  ) -> Self {
    let items = seq.into_iter().collect_vec();
    FmtUnit::new(Variants::sequence(items.len(), delim, seq_bnd), items)
    Variants::default().sequence(items.len(), head, delim, tail, seq_bnd).units_own(items)
  }
}
impl<T> From<T> for FmtUnit
@@ -110,8 +112,29 @@ pub struct Variant {

#[test]
fn variants_parse_test() {
  let vars = Variants::default().bounded("({0})");
  println!("final: {vars:?}")
  let vars = Rc::new(Variants::default().bounded("({{{0}}})"));
  let expected_vars = Rc::new(Variants(vec![Variant {
    bounded: true,
    elements: vec![
      FmtElement::String(Rc::new("({".to_string())),
      FmtElement::Sub { bounded: Some(false), slot: 0 },
      FmtElement::String(Rc::new("})".to_string())),
    ],
  }]));
  assert_eq!(vars.as_ref(), expected_vars.as_ref());
  let unit = vars.units(["1".into()]);
  assert_eq!(unit, FmtUnit {
    subs: vec![FmtUnit {
      subs: vec![],
      variants: Rc::new(Variants(vec![Variant {
        bounded: true,
        elements: vec![FmtElement::String(Rc::new("1".to_string()))]
      }]))
    }],
    variants: expected_vars
  });
  let str = take_first(&unit, true);
  assert_eq!(str, "({1})");
}

/// Represents a collection of formatting strings for the same set of parameters
@@ -208,12 +231,27 @@ impl Variants {
    self.add(false, s);
    self
  }
  pub fn sequence(len: usize, delim: &str, seq_bnd: Option<bool>) -> Rc<Self> {
    let seq = Itertools::intersperse(
      FmtElement::sequence(len, seq_bnd).into_iter(),
      FmtElement::str(delim),
  pub fn sequence(
    mut self,
    len: usize,
    head: &str,
    delim: &str,
    tail: &str,
    seq_bnd: Option<bool>,
  ) -> Self {
    let seq = chain!(
      [FmtElement::str(head)],
      Itertools::intersperse(
        FmtElement::sequence(len, seq_bnd).into_iter(),
        FmtElement::str(delim),
      ),
      [FmtElement::str(tail)],
    );
    Rc::new(Variants(vec![Variant { bounded: true, elements: seq.collect_vec() }]))
    self.0.push(Variant { bounded: true, elements: seq.collect_vec() });
    self
  }
  pub fn units_own(self, subs: impl IntoIterator<Item = FmtUnit>) -> FmtUnit {
    FmtUnit::new(Rc::new(self), subs)
  }
  pub fn units(self: &Rc<Self>, subs: impl IntoIterator<Item = FmtUnit>) -> FmtUnit {
    FmtUnit::new(self.clone(), subs)
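A brief usage sketch of the reworked sequence API (the item values are made up; the From impl shown above is assumed to cover string literals, as it does in the test):

// Old call shape: FmtUnit::sequence(", ", None, items)
// The new shape takes explicit head and tail strings around the delimited items:
let items: Vec<FmtUnit> = vec!["a".into(), "b".into(), "c".into()];
let list = FmtUnit::sequence("[", ", ", "]", None, items);
// which now routes through the builder-style call
// Variants::default().sequence(3, "[", ", ", "]", None).units_own(items)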
@@ -2,8 +2,9 @@

use std::fmt;
use std::hash::Hash;
use std::ops::Range;
use std::ops::{Add, AddAssign, Range};

use futures::future::join_all;
use trait_set::trait_set;

use crate::error::ErrPos;
@@ -25,6 +26,7 @@ pub enum Pos {
  Gen(CodeGenInfo),
  /// Range and file
  SrcRange(SrcRange),
  Multi(Vec<Pos>),
}
impl Pos {
  pub fn pretty_print(&self, get_src: &mut impl GetSrc) -> String {
@@ -39,6 +41,7 @@ impl Pos {
    match_mapping!(api, api::Location => Pos {
      None, Inherit, SlotTarget,
      Gen(cgi => CodeGenInfo::from_api(cgi, i).await),
      Multi(v => join_all(v.iter().map(|l| Pos::from_api(l, i))).await)
    } {
      api::Location::SourceRange(sr) => Self::SrcRange(SrcRange::from_api(sr, i).await)
    })
@@ -47,6 +50,7 @@ impl Pos {
    match_mapping!(self, Pos => api::Location {
      None, Inherit, SlotTarget,
      Gen(cgi.to_api()),
      Multi(v => v.iter().map(|pos| pos.to_api()).collect()),
    } {
      Self::SrcRange(sr) => api::Location::SourceRange(sr.to_api()),
    })
@@ -60,9 +64,36 @@ impl fmt::Display for Pos {
      Pos::None => f.write_str("N/A"),
      Pos::Gen(g) => write!(f, "{g}"),
      Pos::SrcRange(sr) => write!(f, "{sr}"),
      Pos::Multi(posv) => {
        write!(f, "{}", posv[0])?;
        for pos in posv.iter().skip(1) {
          write!(f, "+{}", pos)?;
        }
        Ok(())
      },
    }
  }
}
impl Add for Pos {
  type Output = Pos;
  fn add(self, rhs: Self) -> Self::Output {
    match (self, rhs) {
      (Pos::Multi(l), Pos::Multi(r)) => Pos::Multi(l.into_iter().chain(r).collect()),
      (Pos::None, any) => any,
      (any, Pos::None) => any,
      (Pos::Multi(v), single) => Pos::Multi(v.into_iter().chain([single]).collect()),
      (single, Pos::Multi(v)) => Pos::Multi([single].into_iter().chain(v).collect()),
      (l, r) => Pos::Multi(vec![l, r]),
    }
  }
}
impl AddAssign for Pos {
  fn add_assign(&mut self, rhs: Self) {
    let mut tmp = Pos::None;
    std::mem::swap(&mut tmp, self);
    *self = tmp + rhs;
  }
}
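A short usage sketch of the new Add/AddAssign impls (pos_a and pos_b stand in for arbitrary non-None, non-Multi positions):

let mut pos = Pos::None;
pos += pos_a; // Pos::None is the identity: None + x == x
pos += pos_b; // two plain positions collapse into Pos::Multi(vec![pos_a, pos_b])
assert!(matches!(pos, Pos::Multi(ref v) if v.len() == 2));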
/// Exact source code location. Includes where the code was loaded from, what
/// the original source code was, and a byte range.
@@ -77,7 +108,7 @@ impl SrcRange {
  }
  /// Create a dud [SourceRange] for testing. Its value is unspecified and
  /// volatile.
  pub async fn mock(i: &Interner) -> Self { Self { range: 0..1, path: sym!(test; i).await } }
  pub async fn mock(i: &Interner) -> Self { Self { range: 0..1, path: sym!(test; i) } }
  /// Path the source text was loaded from
  pub fn path(&self) -> Sym { self.path.clone() }
  /// Byte range
@@ -311,7 +311,7 @@ impl NameLike for VName {}
/// cloning the token.
#[macro_export]
macro_rules! sym {
  ($seg1:tt $( :: $seg:tt)* ; $i:expr) => { async {
  ($seg1:tt $( :: $seg:tt)* ; $i:expr) => {
    $crate::name::Sym::from_tok(
      $i.i(&[
        $i.i(stringify!($seg1)).await
@@ -319,9 +319,7 @@ macro_rules! sym {
      ])
      .await
    ).unwrap()
  }
  };
  (@NAME $seg:tt) => {}
}

/// Create a [VName] literal.
@@ -329,12 +327,12 @@ macro_rules! sym {
/// The components are interned much like in [sym].
#[macro_export]
macro_rules! vname {
  ($seg1:tt $( :: $seg:tt)* ; $i:expr) => { async {
  ($seg1:tt $( :: $seg:tt)* ; $i:expr) => {
    $crate::name::VName::new([
      $i.i(stringify!($seg1)).await
      $( , $i.i(stringify!($seg)).await )*
    ]).unwrap()
  } };
  };
}

/// Create a [VPath] literal.
@@ -342,12 +340,12 @@ macro_rules! vname {
/// The components are interned much like in [sym].
#[macro_export]
macro_rules! vpath {
  ($seg1:tt $( :: $seg:tt)+ ; $i:expr) => { async {
  ($seg1:tt $( :: $seg:tt)+ ; $i:expr) => {
    $crate::name::VPath(vec![
      $i.i(stringify!($seg1)).await
      $( , $i.i(stringify!($seg)).await )+
    ])
  } };
  };
  () => {
    $crate::name::VPath(vec![])
  }
@@ -367,7 +365,7 @@ mod test {
  fn recur() {
    spin_on(async {
      let i = Interner::new_master();
      let myname = vname!(foo::bar; i).await;
      let myname = vname!(foo::bar; i);
      let _borrowed_slice: &[Tok<String>] = myname.borrow();
      let _deref_pathslice: &[Tok<String>] = &myname;
      let _as_slice_out: &[Tok<String>] = myname.as_slice();
@@ -379,15 +377,15 @@ mod test {
    spin_on(async {
      let i = Interner::new_master();
      assert_eq!(
        sym!(foo::bar::baz; i).await,
        sym!(foo::bar::baz; i),
        Sym::new([i.i("foo").await, i.i("bar").await, i.i("baz").await], &i).await.unwrap()
      );
      assert_eq!(
        vname!(foo::bar::baz; i).await,
        vname!(foo::bar::baz; i),
        VName::new([i.i("foo").await, i.i("bar").await, i.i("baz").await]).unwrap()
      );
      assert_eq!(
        vpath!(foo::bar::baz; i).await,
        vpath!(foo::bar::baz; i),
        VPath::new([i.i("foo").await, i.i("bar").await, i.i("baz").await])
      );
    })
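In short, these name macros no longer wrap their expansion in an async block, so call sites drop the trailing .await; they still have to be expanded inside an async context, because the interner calls inside the expansion are awaited. A minimal sketch:

// Before: let name = sym!(foo::bar; i).await;
// After (inside an async block or async fn):
let name = sym!(foo::bar; i);
let path = vpath!(foo::bar::baz; i);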
@@ -307,7 +307,7 @@ pub async fn ttv_fmt<'a: 'b, 'b>(
  ttv: impl IntoIterator<Item = &'b TokTree<impl ExprRepr + 'a, impl ExtraTok + 'a>>,
  c: &(impl FmtCtx + ?Sized),
) -> FmtUnit {
  FmtUnit::sequence(" ", None, join_all(ttv.into_iter().map(|t| t.print(c))).await)
  FmtUnit::sequence("", " ", "", None, join_all(ttv.into_iter().map(|t| t.print(c))).await)
}

pub fn indent(s: &str) -> String { s.replace("\n", "\n ") }
@@ -31,6 +31,7 @@ ordered-float = "5.0.0"
pastey = "0.1.1"
some_executor = "0.6.1"
substack = "1.1.1"
task-local = "0.1.0"
tokio = { version = "1.47.1", optional = true, features = [] }
tokio-util = { version = "0.7.16", optional = true, features = ["compat"] }
@@ -12,21 +12,20 @@ use futures::future::LocalBoxFuture;
|
||||
use futures::{AsyncRead, AsyncWrite, FutureExt, StreamExt, stream};
|
||||
use orchid_api_derive::Coding;
|
||||
use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec};
|
||||
use orchid_base::clone;
|
||||
use orchid_base::error::{OrcErrv, OrcRes, mk_errv, mk_errv_floating};
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format};
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, fmt};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::reqnot::Requester;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::api;
|
||||
use crate::context::{ctx, i};
|
||||
use crate::conv::ToExpr;
|
||||
// use crate::error::{ProjectError, ProjectResult};
|
||||
use crate::expr::{Expr, ExprData, ExprHandle, ExprKind};
|
||||
use crate::gen_expr::GExpr;
|
||||
use crate::system::{DynSystemCard, SysCtx, atom_info_for, downcast_atom};
|
||||
use crate::system::{DynSystemCard, atom_info_for, downcast_atom};
|
||||
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
|
||||
pub struct AtomTypeId(pub NonZeroU32);
|
||||
@@ -91,19 +90,18 @@ pub struct ForeignAtom {
|
||||
}
|
||||
impl ForeignAtom {
|
||||
pub fn pos(&self) -> Pos { self.pos.clone() }
|
||||
pub fn ctx(&self) -> &SysCtx { &self.expr.ctx }
|
||||
pub fn ex(self) -> Expr {
|
||||
let (handle, pos) = (self.expr.clone(), self.pos.clone());
|
||||
let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { ..self }) };
|
||||
Expr::new(handle, data)
|
||||
Expr::from_data(handle, data)
|
||||
}
|
||||
pub(crate) fn new(handle: Rc<ExprHandle>, atom: api::Atom, pos: Pos) -> Self {
|
||||
ForeignAtom { atom, expr: handle, pos }
|
||||
}
|
||||
pub async fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
|
||||
let rep = (self.ctx().reqnot().request(api::Fwd(
|
||||
let rep = (ctx().reqnot().request(api::Fwd(
|
||||
self.atom.clone(),
|
||||
Sym::parse(M::NAME, self.ctx().i()).await.unwrap().tok().to_api(),
|
||||
Sym::parse(M::NAME, &i()).await.unwrap().tok().to_api(),
|
||||
enc_vec(&m).await,
|
||||
)))
|
||||
.await?;
|
||||
@@ -121,40 +119,38 @@ impl fmt::Debug for ForeignAtom {
|
||||
}
|
||||
impl Format for ForeignAtom {
|
||||
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
FmtUnit::from_api(&self.ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await)
|
||||
FmtUnit::from_api(&ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await)
|
||||
}
|
||||
}
|
||||
impl ToExpr for ForeignAtom {
|
||||
async fn to_expr(self) -> GExpr { self.ex().to_expr().await }
|
||||
async fn to_gen(self) -> GExpr { self.ex().to_gen().await }
|
||||
}
|
||||
|
||||
pub struct NotTypAtom {
|
||||
pub pos: Pos,
|
||||
pub expr: Expr,
|
||||
pub typ: Box<dyn AtomDynfo>,
|
||||
pub ctx: SysCtx,
|
||||
}
|
||||
impl NotTypAtom {
|
||||
pub async fn mk_err(&self) -> OrcErrv {
|
||||
mk_errv(
|
||||
self.ctx.i().i("Not the expected type").await,
|
||||
format!("This expression is not a {}", self.typ.name()),
|
||||
i().i("Not the expected type").await,
|
||||
format!("The expression {} is not a {}", fmt(&self.expr, &i()).await, self.typ.name()),
|
||||
[self.pos.clone()],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait AtomMethod: Request {
pub trait AtomMethod: Request + Coding {
const NAME: &str;
}
pub trait Supports<M: AtomMethod>: AtomCard {
fn handle(&self, ctx: SysCtx, req: M) -> impl Future<Output = <M as Request>::Response>;
fn handle(&self, req: M) -> impl Future<Output = <M as Request>::Response>;
}
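With AtomMethod now also requiring Coding and Supports::handle dropping the explicit SysCtx parameter, an implementation reduces to the request handler itself. A hedged sketch (GetLen and MyStringAtom are made up for illustration; GetLen is assumed to already implement Request with Response = u64 as well as Coding):

impl AtomMethod for GetLen { const NAME: &str = "mysys::get_len"; }
impl Supports<GetLen> for MyStringAtom {
  // No SysCtx argument any more; reach context via ctx()/i() if needed.
  async fn handle(&self, _req: GetLen) -> u64 { self.text.len() as u64 }
}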
|
||||
|
||||
trait_set! {
|
||||
trait AtomReqCb<A> = for<'a> Fn(
|
||||
&'a A,
|
||||
SysCtx,
|
||||
Pin<&'a mut dyn AsyncRead>,
|
||||
Pin<&'a mut dyn AsyncWrite>,
|
||||
) -> LocalBoxFuture<'a, ()>
|
||||
@@ -171,24 +167,18 @@ impl<A: AtomCard> MethodSetBuilder<A> {
|
||||
assert!(!M::NAME.is_empty(), "AtomMethod::NAME cannoot be empty");
|
||||
self.handlers.push((
|
||||
M::NAME,
|
||||
Rc::new(
|
||||
move |a: &A, ctx: SysCtx, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| {
|
||||
async { Supports::<M>::handle(a, ctx, M::decode(req).await).await.encode(rep).await }
|
||||
.boxed_local()
|
||||
},
|
||||
),
|
||||
Rc::new(move |a: &A, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| {
|
||||
async { Supports::<M>::handle(a, M::decode(req).await).await.encode(rep).await }
|
||||
.boxed_local()
|
||||
}),
|
||||
));
|
||||
self
|
||||
}
|
||||
|
||||
pub async fn pack(&self, ctx: SysCtx) -> MethodSet<A> {
|
||||
pub async fn pack(&self) -> MethodSet<A> {
|
||||
MethodSet {
|
||||
handlers: stream::iter(self.handlers.iter())
|
||||
.then(|(k, v)| {
|
||||
clone!(ctx; async move {
|
||||
(Sym::parse(k, ctx.i()).await.unwrap(), v.clone())
|
||||
})
|
||||
})
|
||||
.then(async |(k, v)| (Sym::parse(k, &i()).await.unwrap(), v.clone()))
|
||||
.collect()
|
||||
.await,
|
||||
}
|
||||
@@ -202,7 +192,6 @@ impl<A: AtomCard> MethodSet<A> {
|
||||
pub(crate) async fn dispatch<'a>(
|
||||
&'a self,
|
||||
atom: &'a A,
|
||||
ctx: SysCtx,
|
||||
key: Sym,
|
||||
req: Pin<&'a mut dyn AsyncRead>,
|
||||
rep: Pin<&'a mut dyn AsyncWrite>,
|
||||
@@ -210,7 +199,7 @@ impl<A: AtomCard> MethodSet<A> {
|
||||
match self.handlers.get(&key) {
|
||||
None => false,
|
||||
Some(handler) => {
|
||||
handler(atom, ctx, req, rep).await;
|
||||
handler(atom, req, rep).await;
|
||||
true
|
||||
},
|
||||
}
|
||||
@@ -228,33 +217,23 @@ pub struct TAtom<A: AtomicFeatures> {
|
||||
}
|
||||
impl<A: AtomicFeatures> TAtom<A> {
|
||||
pub fn ex(&self) -> Expr { self.untyped.clone().ex() }
|
||||
pub fn ctx(&self) -> &SysCtx { self.untyped.ctx() }
|
||||
pub fn i(&self) -> &Interner { self.ctx().i() }
|
||||
pub fn pos(&self) -> Pos { self.untyped.pos() }
|
||||
pub async fn downcast(expr: Rc<ExprHandle>) -> Result<Self, NotTypAtom> {
|
||||
match Expr::from_handle(expr).atom().await {
|
||||
Err(expr) => Err(NotTypAtom {
|
||||
ctx: expr.handle().get_ctx(),
|
||||
pos: expr.data().await.pos.clone(),
|
||||
expr,
|
||||
typ: Box::new(A::info()),
|
||||
}),
|
||||
Err(expr) =>
|
||||
Err(NotTypAtom { pos: expr.data().await.pos.clone(), expr, typ: Box::new(A::info()) }),
|
||||
Ok(atm) => match downcast_atom::<A>(atm).await {
|
||||
Ok(tatom) => Ok(tatom),
|
||||
Err(fa) => Err(NotTypAtom {
|
||||
pos: fa.pos.clone(),
|
||||
ctx: fa.ctx().clone(),
|
||||
expr: fa.ex(),
|
||||
typ: Box::new(A::info()),
|
||||
}),
|
||||
Err(fa) => Err(NotTypAtom { pos: fa.pos.clone(), expr: fa.ex(), typ: Box::new(A::info()) }),
|
||||
},
|
||||
}
|
||||
}
|
||||
pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response
|
||||
where A: Supports<M> {
|
||||
M::Response::decode(Pin::new(
|
||||
&mut &(self.untyped.ctx().reqnot().request(api::Fwd(
|
||||
&mut &(ctx().reqnot().request(api::Fwd(
|
||||
self.untyped.atom.clone(),
|
||||
Sym::parse(M::NAME, self.untyped.ctx().i()).await.unwrap().tok().to_api(),
|
||||
Sym::parse(M::NAME, &i()).await.unwrap().tok().to_api(),
|
||||
enc_vec(&req).await,
|
||||
)))
|
||||
.await
|
||||
@@ -268,13 +247,15 @@ impl<A: AtomicFeatures> Deref for TAtom<A> {
|
||||
fn deref(&self) -> &Self::Target { &self.value }
|
||||
}
|
||||
impl<A: AtomicFeatures> ToExpr for TAtom<A> {
|
||||
async fn to_expr(self) -> GExpr { self.untyped.to_expr().await }
|
||||
async fn to_gen(self) -> GExpr { self.untyped.to_gen().await }
|
||||
}
|
||||
impl<A: AtomicFeatures> Format for TAtom<A> {
|
||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
self.untyped.print(c).await
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>, pub SysCtx);
|
||||
impl FmtCtx for AtomCtx<'_> {
|
||||
fn i(&self) -> &Interner { self.2.i() }
|
||||
}
|
||||
pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>);
|
||||
|
||||
pub trait AtomDynfo: 'static {
|
||||
fn tid(&self) -> TypeId;
|
||||
@@ -296,24 +277,19 @@ pub trait AtomDynfo: 'static {
|
||||
ctx: AtomCtx<'a>,
|
||||
write: Pin<&'b mut dyn AsyncWrite>,
|
||||
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
|
||||
fn deserialize<'a>(
|
||||
&'a self,
|
||||
ctx: SysCtx,
|
||||
data: &'a [u8],
|
||||
refs: &'a [Expr],
|
||||
) -> LocalBoxFuture<'a, api::Atom>;
|
||||
fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom>;
|
||||
fn drop<'a>(&'a self, ctx: AtomCtx<'a>) -> LocalBoxFuture<'a, ()>;
|
||||
}
|
||||
|
||||
trait_set! {
|
||||
pub trait AtomFactoryFn = FnOnce(SysCtx) -> LocalBoxFuture<'static, api::Atom> + DynClone;
|
||||
pub trait AtomFactoryFn = FnOnce() -> LocalBoxFuture<'static, api::Atom> + DynClone;
|
||||
}
|
||||
pub struct AtomFactory(Box<dyn AtomFactoryFn>);
|
||||
impl AtomFactory {
|
||||
pub fn new(f: impl AsyncFnOnce(SysCtx) -> api::Atom + Clone + 'static) -> Self {
|
||||
Self(Box::new(|ctx| f(ctx).boxed_local()))
|
||||
pub fn new(f: impl AsyncFnOnce() -> api::Atom + Clone + 'static) -> Self {
|
||||
Self(Box::new(|| f().boxed_local()))
|
||||
}
|
||||
pub async fn build(self, ctx: SysCtx) -> api::Atom { (self.0)(ctx).await }
|
||||
pub async fn build(self) -> api::Atom { (self.0)().await }
|
||||
}
|
||||
impl Clone for AtomFactory {
|
||||
fn clone(&self) -> Self { AtomFactory(clone_box(&*self.0)) }
|
||||
@@ -330,10 +306,10 @@ impl Format for AtomFactory {
}
}

pub async fn err_not_callable(i: &Interner) -> OrcErrv {
mk_errv_floating(i.i("This atom is not callable").await, "Attempted to apply value as function")
pub async fn err_not_callable() -> OrcErrv {
mk_errv_floating(i().i("This atom is not callable").await, "Attempted to apply value as function")
}

pub async fn err_not_command(i: &Interner) -> OrcErrv {
mk_errv_floating(i.i("This atom is not a command").await, "Settled on an inactionable value")
pub async fn err_not_command() -> OrcErrv {
mk_errv_floating(i().i("This atom is not a command").await, "Settled on an inactionable value")
}

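Since the interner now comes from the task-local context, call sites drop the explicit &Interner argument. A minimal before/after sketch (the surrounding call site is illustrative, not taken from this diff):

// before: the interner had to be threaded through by hand
// bot(err_not_callable(ctx.i()).await)
// after: the task-local context supplies it
bot(err_not_callable().await)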
||||
@@ -25,26 +25,26 @@ use crate::atom::{
|
||||
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
|
||||
MethodSetBuilder, TAtom, err_not_callable, err_not_command, get_info,
|
||||
};
|
||||
use crate::context::{SysCtxEntry, ctx, i};
|
||||
use crate::expr::Expr;
|
||||
use crate::gen_expr::{GExpr, bot};
|
||||
use crate::system::{SysCtx, SysCtxEntry};
|
||||
use crate::system_ctor::CtedObj;
|
||||
|
||||
pub struct OwnedVariant;
|
||||
impl AtomicVariant for OwnedVariant {}
|
||||
impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A {
|
||||
fn _factory(self) -> AtomFactory {
|
||||
AtomFactory::new(async move |ctx| {
|
||||
let serial =
|
||||
ctx.get_or_default::<ObjStore>().next_id.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
|
||||
AtomFactory::new(async move || {
|
||||
let serial = ctx()
|
||||
.get_or_default::<ObjStore>()
|
||||
.next_id
|
||||
.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
|
||||
let atom_id = api::AtomId(NonZero::new(serial + 1).unwrap());
|
||||
let (typ_id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card());
|
||||
let (typ_id, _) = get_info::<A>(ctx().get::<CtedObj>().inst().card());
|
||||
let mut data = enc_vec(&typ_id).await;
|
||||
self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await;
|
||||
let g = ctx.get_or_default::<ObjStore>().objects.read().await;
|
||||
g.insert(atom_id, Box::new(self));
|
||||
std::mem::drop(g);
|
||||
api::Atom { drop: Some(atom_id), data: api::AtomData(data), owner: ctx.sys_id() }
|
||||
ctx().get_or_default::<ObjStore>().objects.read().await.insert(atom_id, Box::new(self));
|
||||
api::Atom { drop: Some(atom_id), data: api::AtomData(data), owner: ctx().sys_id() }
|
||||
})
|
||||
}
|
||||
fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } }
|
||||
@@ -58,8 +58,8 @@ pub(crate) struct AtomReadGuard<'a> {
|
||||
guard: RwLockReadGuard<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
|
||||
}
|
||||
impl<'a> AtomReadGuard<'a> {
|
||||
async fn new(id: api::AtomId, ctx: &'a SysCtx) -> Self {
|
||||
let guard = ctx.get_or_default::<ObjStore>().objects.read().await;
|
||||
async fn new(id: api::AtomId) -> Self {
|
||||
let guard = ctx().get_or_default::<ObjStore>().objects.read().await;
|
||||
if guard.get(&id).is_none() {
|
||||
panic!("Received invalid atom ID: {id:?}");
|
||||
}
|
||||
@@ -72,8 +72,8 @@ impl Deref for AtomReadGuard<'_> {
|
||||
}
|
||||
|
||||
/// Remove an atom from the store
|
||||
pub(crate) async fn take_atom(id: api::AtomId, ctx: &SysCtx) -> Box<dyn DynOwnedAtom> {
|
||||
let mut g = ctx.get_or_default::<ObjStore>().objects.write().await;
|
||||
pub(crate) async fn take_atom(id: api::AtomId) -> Box<dyn DynOwnedAtom> {
|
||||
let mut g = ctx().get_or_default::<ObjStore>().objects.write().await;
|
||||
g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0))
|
||||
}
|
||||
|
||||
@@ -89,64 +89,53 @@ impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
|
||||
Box::new(<T as AtomCard>::Data::decode(Pin::new(&mut &data[..])).await) as Box<dyn Any>
|
||||
})
|
||||
}
|
||||
fn call(&self, AtomCtx(_, id, ctx): AtomCtx, arg: Expr) -> LocalBoxFuture<'_, GExpr> {
|
||||
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_call(arg).await })
|
||||
fn call(&self, AtomCtx(_, id): AtomCtx, arg: Expr) -> LocalBoxFuture<'_, GExpr> {
|
||||
Box::pin(async move { take_atom(id.unwrap()).await.dyn_call(arg).await })
|
||||
}
|
||||
fn call_ref<'a>(
|
||||
&'a self,
|
||||
AtomCtx(_, id, ctx): AtomCtx<'a>,
|
||||
arg: Expr,
|
||||
) -> LocalBoxFuture<'a, GExpr> {
|
||||
Box::pin(async move { AtomReadGuard::new(id.unwrap(), &ctx).await.dyn_call_ref(arg).await })
|
||||
fn call_ref<'a>(&'a self, AtomCtx(_, id): AtomCtx<'a>, arg: Expr) -> LocalBoxFuture<'a, GExpr> {
|
||||
Box::pin(async move { AtomReadGuard::new(id.unwrap()).await.dyn_call_ref(arg).await })
|
||||
}
|
||||
fn print(&self, AtomCtx(_, id, ctx): AtomCtx<'_>) -> LocalBoxFuture<'_, FmtUnit> {
|
||||
Box::pin(
|
||||
async move { AtomReadGuard::new(id.unwrap(), &ctx).await.dyn_print(ctx.clone()).await },
|
||||
)
|
||||
fn print(&self, AtomCtx(_, id): AtomCtx<'_>) -> LocalBoxFuture<'_, FmtUnit> {
|
||||
Box::pin(async move { AtomReadGuard::new(id.unwrap()).await.dyn_print().await })
|
||||
}
|
||||
fn handle_req<'a, 'b: 'a, 'c: 'a>(
|
||||
&'a self,
|
||||
AtomCtx(_, id, ctx): AtomCtx,
|
||||
AtomCtx(_, id): AtomCtx,
|
||||
key: Sym,
|
||||
req: Pin<&'b mut dyn AsyncRead>,
|
||||
rep: Pin<&'c mut dyn AsyncWrite>,
|
||||
) -> LocalBoxFuture<'a, bool> {
|
||||
Box::pin(async move {
|
||||
let a = AtomReadGuard::new(id.unwrap(), &ctx).await;
|
||||
let ms = self.ms.get_or_init(self.msbuild.pack(ctx.clone())).await;
|
||||
ms.dispatch(a.as_any_ref().downcast_ref().unwrap(), ctx.clone(), key, req, rep).await
|
||||
let a = AtomReadGuard::new(id.unwrap()).await;
|
||||
let ms = self.ms.get_or_init(self.msbuild.pack()).await;
|
||||
ms.dispatch(a.as_any_ref().downcast_ref().unwrap(), key, req, rep).await
|
||||
})
|
||||
}
|
||||
fn command<'a>(
|
||||
&'a self,
|
||||
AtomCtx(_, id, ctx): AtomCtx<'a>,
|
||||
AtomCtx(_, id): AtomCtx<'a>,
|
||||
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
|
||||
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_command(ctx.clone()).await })
|
||||
Box::pin(async move { take_atom(id.unwrap()).await.dyn_command().await })
|
||||
}
|
||||
fn drop(&self, AtomCtx(_, id, ctx): AtomCtx) -> LocalBoxFuture<'_, ()> {
|
||||
Box::pin(async move { take_atom(id.unwrap(), &ctx).await.dyn_free(ctx.clone()).await })
|
||||
fn drop(&self, AtomCtx(_, id): AtomCtx) -> LocalBoxFuture<'_, ()> {
|
||||
Box::pin(async move { take_atom(id.unwrap()).await.dyn_free().await })
|
||||
}
|
||||
fn serialize<'a, 'b: 'a>(
|
||||
&'a self,
|
||||
AtomCtx(_, id, ctx): AtomCtx<'a>,
|
||||
AtomCtx(_, id): AtomCtx<'a>,
|
||||
mut write: Pin<&'b mut dyn AsyncWrite>,
|
||||
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
|
||||
Box::pin(async move {
|
||||
let id = id.unwrap();
|
||||
id.encode(write.as_mut()).await;
|
||||
AtomReadGuard::new(id, &ctx).await.dyn_serialize(ctx.clone(), write).await
|
||||
AtomReadGuard::new(id).await.dyn_serialize(write).await
|
||||
})
|
||||
}
|
||||
fn deserialize<'a>(
|
||||
&'a self,
|
||||
ctx: SysCtx,
|
||||
data: &'a [u8],
|
||||
refs: &'a [Expr],
|
||||
) -> LocalBoxFuture<'a, api::Atom> {
|
||||
fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom> {
|
||||
Box::pin(async move {
|
||||
let refs = T::Refs::from_iter(refs.iter().cloned());
|
||||
let obj = T::deserialize(DeserCtxImpl(data, &ctx), refs).await;
|
||||
obj._factory().build(ctx).await
|
||||
let obj = T::deserialize(DeserCtxImpl(data), refs).await;
|
||||
obj._factory().build().await
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -162,14 +151,12 @@ pub trait DeserializeCtx: Sized {
|
||||
t
|
||||
}
|
||||
}
|
||||
fn sys(&self) -> SysCtx;
|
||||
}
|
||||
|
||||
struct DeserCtxImpl<'a>(&'a [u8], &'a SysCtx);
|
||||
struct DeserCtxImpl<'a>(&'a [u8]);
|
||||
impl DeserializeCtx for DeserCtxImpl<'_> {
|
||||
async fn read<T: Decode>(&mut self) -> T { T::decode(Pin::new(&mut self.0)).await }
|
||||
fn is_empty(&self) -> bool { self.0.is_empty() }
|
||||
fn sys(&self) -> SysCtx { self.1.clone() }
|
||||
}
|
||||
|
||||
pub trait RefSet {
|
||||
@@ -220,22 +207,21 @@ pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Any + Clone + 'static {
|
||||
fn val(&self) -> impl Future<Output = Cow<'_, Self::Data>>;
|
||||
#[allow(unused_variables)]
|
||||
fn call_ref(&self, arg: Expr) -> impl Future<Output = GExpr> {
|
||||
async move { bot(err_not_callable(arg.ctx().i()).await) }
|
||||
async move { bot(err_not_callable().await) }
|
||||
}
|
||||
fn call(self, arg: Expr) -> impl Future<Output = GExpr> {
|
||||
async {
|
||||
let ctx = arg.ctx();
|
||||
let gcl = self.call_ref(arg).await;
|
||||
self.free(ctx).await;
|
||||
self.free().await;
|
||||
gcl
|
||||
}
|
||||
}
|
||||
#[allow(unused_variables)]
|
||||
fn command(self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> {
|
||||
async move { Err(err_not_command(ctx.i()).await) }
|
||||
fn command(self) -> impl Future<Output = OrcRes<Option<GExpr>>> {
|
||||
async move { Err(err_not_command().await) }
|
||||
}
|
||||
#[allow(unused_variables)]
|
||||
fn free(self, ctx: SysCtx) -> impl Future<Output = ()> { async {} }
|
||||
fn free(self) -> impl Future<Output = ()> { async {} }
|
||||
#[allow(unused_variables)]
|
||||
fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> {
|
||||
async { format!("OwnedAtom({})", type_name::<Self>()).into() }
|
||||
@@ -243,14 +229,13 @@ pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Any + Clone + 'static {
|
||||
#[allow(unused_variables)]
|
||||
fn serialize(
|
||||
&self,
|
||||
ctx: SysCtx,
|
||||
write: Pin<&mut (impl AsyncWrite + ?Sized)>,
|
||||
) -> impl Future<Output = Self::Refs> {
|
||||
assert_serializable::<Self>();
|
||||
async { panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>()) }
|
||||
}
|
||||
#[allow(unused_variables)]
|
||||
fn deserialize(ctx: impl DeserializeCtx, refs: Self::Refs) -> impl Future<Output = Self> {
|
||||
fn deserialize(dctx: impl DeserializeCtx, refs: Self::Refs) -> impl Future<Output = Self> {
|
||||
assert_serializable::<Self>();
|
||||
async {
|
||||
panic!("Either implement deserialize or set Refs to Never for {}", type_name::<Self>())
|
||||
@@ -269,12 +254,11 @@ pub trait DynOwnedAtom: DynClone + 'static {
|
||||
fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()>;
|
||||
fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr>;
|
||||
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr>;
|
||||
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>>;
|
||||
fn dyn_free(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, ()>;
|
||||
fn dyn_print(&self, ctx: SysCtx) -> LocalBoxFuture<'_, FmtUnit>;
|
||||
fn dyn_command(self: Box<Self>) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>>;
|
||||
fn dyn_free(self: Box<Self>) -> LocalBoxFuture<'static, ()>;
|
||||
fn dyn_print(&self) -> LocalBoxFuture<'_, FmtUnit>;
|
||||
fn dyn_serialize<'a>(
|
||||
&'a self,
|
||||
ctx: SysCtx,
|
||||
sink: Pin<&'a mut dyn AsyncWrite>,
|
||||
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
|
||||
}
|
||||
@@ -290,23 +274,20 @@ impl<T: OwnedAtom> DynOwnedAtom for T {
|
||||
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr> {
|
||||
self.call(arg).boxed_local()
|
||||
}
|
||||
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>> {
|
||||
self.command(ctx).boxed_local()
|
||||
fn dyn_command(self: Box<Self>) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>> {
|
||||
self.command().boxed_local()
|
||||
}
|
||||
fn dyn_free(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, ()> {
|
||||
self.free(ctx).boxed_local()
|
||||
}
|
||||
fn dyn_print(&self, ctx: SysCtx) -> LocalBoxFuture<'_, FmtUnit> {
|
||||
async move { self.print_atom(&FmtCtxImpl { i: ctx.i() }).await }.boxed_local()
|
||||
fn dyn_free(self: Box<Self>) -> LocalBoxFuture<'static, ()> { self.free().boxed_local() }
|
||||
fn dyn_print(&self) -> LocalBoxFuture<'_, FmtUnit> {
|
||||
async move { self.print_atom(&FmtCtxImpl { i: &i() }).await }.boxed_local()
|
||||
}
|
||||
fn dyn_serialize<'a>(
|
||||
&'a self,
|
||||
ctx: SysCtx,
|
||||
sink: Pin<&'a mut dyn AsyncWrite>,
|
||||
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
|
||||
match TypeId::of::<Never>() == TypeId::of::<<Self as OwnedAtom>::Refs>() {
|
||||
true => ready(None).boxed_local(),
|
||||
false => async { Some(self.serialize(ctx, sink).await.to_vec()) }.boxed_local(),
|
||||
false => async { Some(self.serialize(sink).await.to_vec()) }.boxed_local(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -318,16 +299,16 @@ pub(crate) struct ObjStore {
|
||||
}
|
||||
impl SysCtxEntry for ObjStore {}
|
||||
|
||||
pub async fn own<A: OwnedAtom>(typ: TAtom<A>) -> A {
|
||||
let ctx = typ.untyped.ctx();
|
||||
let g = ctx.get_or_default::<ObjStore>().objects.read().await;
|
||||
pub async fn own<A: OwnedAtom>(typ: &TAtom<A>) -> A {
|
||||
let g = ctx().get_or_default::<ObjStore>().objects.read().await;
|
||||
let atom_id = typ.untyped.atom.drop.expect("Owned atoms always have a drop ID");
|
||||
let dyn_atom =
|
||||
g.get(&atom_id).expect("Atom ID invalid; atom type probably not owned by this crate");
|
||||
dyn_atom.as_any_ref().downcast_ref().cloned().expect("The ID should imply a type as well")
|
||||
}
|
||||
|
||||
pub async fn debug_print_obj_store(ctx: &SysCtx, show_atoms: bool) {
|
||||
pub async fn debug_print_obj_store(show_atoms: bool) {
|
||||
let ctx = ctx();
|
||||
let store = ctx.get_or_default::<ObjStore>();
|
||||
let keys = store.objects.read().await.keys().cloned().collect_vec();
|
||||
let mut message = "Atoms in store:".to_string();
|
||||
@@ -342,7 +323,7 @@ pub async fn debug_print_obj_store(ctx: &SysCtx, show_atoms: bool) {
|
||||
};
|
||||
let atom = clone_box(&**atom);
|
||||
std::mem::drop(g);
|
||||
message += &format!("\n{k:?} -> {}", take_first(&atom.dyn_print(ctx.clone()).await, true));
|
||||
message += &format!("\n{k:?} -> {}", take_first(&atom.dyn_print().await, true));
|
||||
}
|
||||
}
|
||||
eprintln!("{message}")
|
||||
|
||||
@@ -15,20 +15,20 @@ use crate::atom::{
|
||||
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
|
||||
MethodSetBuilder, err_not_callable, err_not_command, get_info,
|
||||
};
|
||||
use crate::context::ctx;
|
||||
use crate::expr::Expr;
|
||||
use crate::gen_expr::{GExpr, bot};
|
||||
use crate::system::SysCtx;
|
||||
use crate::system_ctor::CtedObj;
|
||||
|
||||
pub struct ThinVariant;
|
||||
impl AtomicVariant for ThinVariant {}
|
||||
impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A {
|
||||
fn _factory(self) -> AtomFactory {
|
||||
AtomFactory::new(async move |ctx| {
|
||||
let (id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card());
|
||||
AtomFactory::new(async move || {
|
||||
let (id, _) = get_info::<A>(ctx().get::<CtedObj>().inst().card());
|
||||
let mut buf = enc_vec(&id).await;
|
||||
self.encode(Pin::new(&mut buf)).await;
|
||||
api::Atom { drop: None, data: api::AtomData(buf), owner: ctx.sys_id() }
|
||||
api::Atom { drop: None, data: api::AtomData(buf), owner: ctx().sys_id() }
|
||||
})
|
||||
}
|
||||
fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } }
|
||||
@@ -40,8 +40,8 @@ pub struct ThinAtomDynfo<T: ThinAtom> {
|
||||
ms: OnceCell<MethodSet<T>>,
|
||||
}
|
||||
impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
|
||||
fn print<'a>(&self, AtomCtx(buf, _, ctx): AtomCtx<'a>) -> LocalBoxFuture<'a, FmtUnit> {
|
||||
Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.print(ctx).await })
|
||||
fn print<'a>(&self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, FmtUnit> {
|
||||
Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.print().await })
|
||||
}
|
||||
fn tid(&self) -> TypeId { TypeId::of::<T>() }
|
||||
fn name(&self) -> &'static str { type_name::<T>() }
|
||||
@@ -56,21 +56,21 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
|
||||
}
|
||||
fn handle_req<'a, 'm1: 'a, 'm2: 'a>(
|
||||
&'a self,
|
||||
AtomCtx(buf, _, sys): AtomCtx<'a>,
|
||||
AtomCtx(buf, _): AtomCtx<'a>,
|
||||
key: Sym,
|
||||
req: Pin<&'m1 mut dyn AsyncRead>,
|
||||
rep: Pin<&'m2 mut dyn AsyncWrite>,
|
||||
) -> LocalBoxFuture<'a, bool> {
|
||||
Box::pin(async move {
|
||||
let ms = self.ms.get_or_init(self.msbuild.pack(sys.clone())).await;
|
||||
ms.dispatch(&T::decode(Pin::new(&mut &buf[..])).await, sys, key, req, rep).await
|
||||
let ms = self.ms.get_or_init(self.msbuild.pack()).await;
|
||||
ms.dispatch(&T::decode(Pin::new(&mut &buf[..])).await, key, req, rep).await
|
||||
})
|
||||
}
|
||||
fn command<'a>(
|
||||
&'a self,
|
||||
AtomCtx(buf, _, ctx): AtomCtx<'a>,
|
||||
AtomCtx(buf, _): AtomCtx<'a>,
|
||||
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
|
||||
async move { T::decode(Pin::new(&mut &buf[..])).await.command(ctx).await }.boxed_local()
|
||||
async move { T::decode(Pin::new(&mut &buf[..])).await.command().await }.boxed_local()
|
||||
}
|
||||
fn serialize<'a, 'b: 'a>(
|
||||
&'a self,
|
||||
@@ -82,19 +82,14 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
|
||||
Some(Vec::new())
|
||||
})
|
||||
}
|
||||
fn deserialize<'a>(
|
||||
&'a self,
|
||||
ctx: SysCtx,
|
||||
data: &'a [u8],
|
||||
refs: &'a [Expr],
|
||||
) -> LocalBoxFuture<'a, api::Atom> {
|
||||
fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom> {
|
||||
assert!(refs.is_empty(), "Refs found when deserializing thin atom");
|
||||
Box::pin(async { T::decode(Pin::new(&mut &data[..])).await._factory().build(ctx).await })
|
||||
Box::pin(async { T::decode(Pin::new(&mut &data[..])).await._factory().build().await })
|
||||
}
|
||||
fn drop<'a>(&'a self, AtomCtx(buf, _, ctx): AtomCtx<'a>) -> LocalBoxFuture<'a, ()> {
|
||||
fn drop<'a>(&'a self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, ()> {
|
||||
Box::pin(async move {
|
||||
let string_self = T::decode(Pin::new(&mut &buf[..])).await.print(ctx.clone()).await;
|
||||
writeln!(ctx.logger(), "Received drop signal for non-drop atom {string_self:?}");
|
||||
let string_self = T::decode(Pin::new(&mut &buf[..])).await.print().await;
|
||||
writeln!(ctx().logger(), "Received drop signal for non-drop atom {string_self:?}");
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -104,14 +99,14 @@ pub trait ThinAtom:
|
||||
{
|
||||
#[allow(unused_variables)]
|
||||
fn call(&self, arg: Expr) -> impl Future<Output = GExpr> {
|
||||
async move { bot(err_not_callable(arg.ctx().i()).await) }
|
||||
async move { bot(err_not_callable().await) }
|
||||
}
|
||||
#[allow(unused_variables)]
|
||||
fn command(&self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> {
|
||||
async move { Err(err_not_command(ctx.i()).await) }
|
||||
fn command(&self) -> impl Future<Output = OrcRes<Option<GExpr>>> {
|
||||
async move { Err(err_not_command().await) }
|
||||
}
|
||||
#[allow(unused_variables)]
|
||||
fn print(&self, ctx: SysCtx) -> impl Future<Output = FmtUnit> {
|
||||
fn print(&self) -> impl Future<Output = FmtUnit> {
|
||||
async { format!("ThinAtom({})", type_name::<Self>()).into() }
|
||||
}
|
||||
}
|
||||
|
||||
90
orchid-extension/src/context.rs
Normal file
@@ -0,0 +1,90 @@
use std::any::{Any, TypeId, type_name};
use std::fmt;
use std::num::NonZero;
use std::rc::Rc;

use memo_map::MemoMap;
use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner;
use orchid_base::logging::Logger;
use orchid_base::reqnot::ReqNot;
use task_local::task_local;

use crate::api;
use crate::system_ctor::CtedObj;

#[derive(Clone)]
pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
impl SysCtx {
pub fn new(
id: api::SysId,
i: Interner,
reqnot: ReqNot<api::ExtMsgSet>,
spawner: Spawner,
logger: Logger,
cted: CtedObj,
) -> Self {
let this = Self(Rc::new(MemoMap::new()));
this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted);
this
}
pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self {
assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists");
self
}
pub fn get_or_insert<T: SysCtxEntry>(&self, f: impl FnOnce() -> T) -> &T {
(self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref())
.expect("Keyed by TypeId")
}
pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { self.get_or_insert(T::default) }
pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> {
Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId"))
}
pub fn get<T: SysCtxEntry>(&self) -> &T {
self.try_get().unwrap_or_else(|| panic!("Context {} missing", type_name::<T>()))
}
/// Shorthand to get the messaging link
pub fn reqnot(&self) -> &ReqNot<api::ExtMsgSet> { self.get::<ReqNot<api::ExtMsgSet>>() }
/// Shorthand to get the system ID
pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() }
/// Spawn a task that will eventually be executed asynchronously
pub fn spawn(&self, f: impl Future<Output = ()> + 'static) {
(self.get::<Spawner>())(Box::pin(CTX.scope(self.clone(), f)))
}
/// Shorthand to get the logger
pub fn logger(&self) -> &Logger { self.get::<Logger>() }
/// Shorthand to get the constructed system object
pub fn cted(&self) -> &CtedObj { self.get::<CtedObj>() }
}
impl fmt::Debug for SysCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "SysCtx({:?})", self.sys_id())
}
}
pub trait SysCtxEntry: 'static + Sized {}
impl SysCtxEntry for api::SysId {}
impl SysCtxEntry for ReqNot<api::ExtMsgSet> {}
impl SysCtxEntry for Spawner {}
impl SysCtxEntry for CtedObj {}
impl SysCtxEntry for Logger {}
impl SysCtxEntry for Interner {}

task_local! {
static CTX: SysCtx;
}

pub async fn with_ctx<F: Future>(ctx: SysCtx, f: F) -> F::Output { CTX.scope(ctx, f).await }
pub fn ctx() -> SysCtx { CTX.get() }

/// Shorthand to get the [Interner] instance
pub fn i() -> Interner { ctx().get::<Interner>().clone() }

pub fn mock_ctx() -> SysCtx {
let ctx = SysCtx(Rc::default());
ctx
.add(Logger::new(api::LogStrategy::StdErr))
.add(Interner::new_master())
.add::<Spawner>(Rc::new(|_| panic!("Cannot fork in test environment")))
.add(api::SysId(NonZero::<u16>::MIN));
ctx
}
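The intent of this module, as a hedged sketch: code that runs inside with_ctx (or in a task spawned through SysCtx::spawn) reaches the same SysCtx through the free functions instead of threading it as a parameter. The handle_request wrapper below is illustrative and not part of the diff:

async fn handle_request(ctx: SysCtx) {
  with_ctx(ctx, async {
    // Inside the scope, ctx() and i() resolve to the task-local SysCtx.
    let name = i().i("example").await;
    writeln!(ctx().logger(), "interned {name:?}");
  })
  .await;
}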
|
||||
@@ -4,14 +4,13 @@ use std::pin::Pin;
|
||||
use dyn_clone::DynClone;
|
||||
use never::Never;
|
||||
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::location::Pos;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::atom::{AtomicFeatures, ForeignAtom, TAtom, ToAtom};
|
||||
use crate::context::i;
|
||||
use crate::expr::Expr;
|
||||
use crate::gen_expr::{GExpr, atom, bot};
|
||||
use crate::system::{SysCtx, downcast_atom};
|
||||
|
||||
pub trait TryFromExpr: Sized {
|
||||
fn try_from_expr(expr: Expr) -> impl Future<Output = OrcRes<Self>>;
|
||||
@@ -27,18 +26,14 @@ impl<T: TryFromExpr, U: TryFromExpr> TryFromExpr for (T, U) {
|
||||
}
|
||||
}
|
||||
|
||||
async fn err_not_atom(pos: Pos, i: &Interner) -> OrcErrv {
|
||||
mk_errv(i.i("Expected an atom").await, "This expression is not an atom", [pos])
|
||||
}
|
||||
|
||||
async fn err_type(pos: Pos, i: &Interner) -> OrcErrv {
|
||||
mk_errv(i.i("Type error").await, "The atom is a different type than expected", [pos])
|
||||
async fn err_not_atom(pos: Pos) -> OrcErrv {
|
||||
mk_errv(i().i("Expected an atom").await, "This expression is not an atom", [pos])
|
||||
}
|
||||
|
||||
impl TryFromExpr for ForeignAtom {
|
||||
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
||||
match expr.atom().await {
|
||||
Err(ex) => Err(err_not_atom(ex.data().await.pos.clone(), ex.ctx().i()).await),
|
||||
Err(ex) => Err(err_not_atom(ex.data().await.pos.clone()).await),
|
||||
Ok(f) => Ok(f),
|
||||
}
|
||||
}
|
||||
@@ -47,27 +42,34 @@ impl TryFromExpr for ForeignAtom {
|
||||
impl<A: AtomicFeatures> TryFromExpr for TAtom<A> {
|
||||
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
|
||||
let f = ForeignAtom::try_from_expr(expr).await?;
|
||||
match downcast_atom::<A>(f).await {
|
||||
match f.clone().downcast::<A>().await {
|
||||
Ok(a) => Ok(a),
|
||||
Err(f) => Err(err_type(f.pos(), f.ctx().i()).await),
|
||||
Err(e) => Err(e.mk_err().await),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFromExpr for SysCtx {
|
||||
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr.ctx()) }
|
||||
}
|
||||
|
||||
pub trait ToExpr {
fn to_expr(self) -> impl Future<Output = GExpr>;
fn to_gen(self) -> impl Future<Output = GExpr>;
fn to_expr(self) -> impl Future<Output = Expr>
where Self: Sized {
async { self.to_gen().await.create().await }
}
}
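Under the new split, implementors provide only to_gen; the provided to_expr awaits it and then instantiates the result. A hedged sketch for a hypothetical newtype (not part of the diff), mirroring the impls further down in this file:

struct Wrapped(GExpr);
impl ToExpr for Wrapped {
  async fn to_gen(self) -> GExpr { self.0 }
  // to_expr is inherited: it awaits to_gen() and then create()s the Expr.
}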
|
||||
|
||||
pub trait ToExprDyn {
|
||||
fn to_expr_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = GExpr> + 'a>>
|
||||
fn to_gen_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = GExpr> + 'a>>
|
||||
where Self: 'a;
|
||||
|
||||
fn to_expr_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = Expr> + 'a>>
|
||||
where Self: 'a;
|
||||
}
|
||||
impl<T: ToExpr> ToExprDyn for T {
|
||||
fn to_expr_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = GExpr> + 'a>>
|
||||
fn to_gen_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = GExpr> + 'a>>
|
||||
where Self: 'a {
|
||||
Box::pin(self.to_gen())
|
||||
}
|
||||
fn to_expr_dyn<'a>(self: Box<Self>) -> Pin<Box<dyn Future<Output = Expr> + 'a>>
|
||||
where Self: 'a {
|
||||
Box::pin(self.to_expr())
|
||||
}
|
||||
@@ -76,35 +78,39 @@ trait_set! {
|
||||
pub trait ClonableToExprDyn = ToExprDyn + DynClone;
|
||||
}
|
||||
impl ToExpr for Box<dyn ToExprDyn> {
|
||||
async fn to_expr(self) -> GExpr { self.to_expr_dyn().await }
|
||||
async fn to_gen(self) -> GExpr { self.to_gen_dyn().await }
|
||||
async fn to_expr(self) -> Expr { self.to_expr_dyn().await }
|
||||
}
|
||||
impl ToExpr for Box<dyn ClonableToExprDyn> {
|
||||
async fn to_expr(self) -> GExpr { self.to_expr_dyn().await }
|
||||
async fn to_gen(self) -> GExpr { self.to_gen_dyn().await }
|
||||
async fn to_expr(self) -> Expr { self.to_expr_dyn().await }
|
||||
}
|
||||
impl Clone for Box<dyn ClonableToExprDyn> {
|
||||
fn clone(&self) -> Self { dyn_clone::clone_box(&**self) }
|
||||
}
|
||||
|
||||
impl ToExpr for GExpr {
|
||||
async fn to_expr(self) -> GExpr { self }
|
||||
async fn to_gen(self) -> GExpr { self }
|
||||
async fn to_expr(self) -> Expr { self.create().await }
|
||||
}
|
||||
impl ToExpr for Expr {
|
||||
async fn to_expr(self) -> GExpr { self.slot() }
|
||||
async fn to_gen(self) -> GExpr { self.slot() }
|
||||
async fn to_expr(self) -> Expr { self }
|
||||
}
|
||||
|
||||
impl<T: ToExpr> ToExpr for OrcRes<T> {
|
||||
async fn to_expr(self) -> GExpr {
|
||||
async fn to_gen(self) -> GExpr {
|
||||
match self {
|
||||
Err(e) => bot(e),
|
||||
Ok(t) => t.to_expr().await,
|
||||
Ok(t) => t.to_gen().await,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<A: ToAtom> ToExpr for A {
|
||||
async fn to_expr(self) -> GExpr { atom(self) }
|
||||
async fn to_gen(self) -> GExpr { atom(self) }
|
||||
}
|
||||
|
||||
impl ToExpr for Never {
|
||||
async fn to_expr(self) -> GExpr { match self {} }
|
||||
async fn to_gen(self) -> GExpr { match self {} }
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ use futures::stream::{self, LocalBoxStream};
|
||||
use futures::{FutureExt, SinkExt, StreamExt};
|
||||
use never::Never;
|
||||
use orchid_base::error::OrcRes;
|
||||
use orchid_base::format::{FmtCtx, FmtUnit};
|
||||
|
||||
use crate::atom::Atomic;
|
||||
use crate::atom_owned::{OwnedAtom, OwnedVariant};
|
||||
@@ -23,7 +22,6 @@ enum Command {
|
||||
}
|
||||
|
||||
struct BuilderCoroutineData {
|
||||
name: Option<String>,
|
||||
receiver: Mutex<LocalBoxStream<'static, Command>>,
|
||||
}
|
||||
|
||||
@@ -36,11 +34,14 @@ impl BuilderCoroutine {
|
||||
None => panic!("Before the stream ends, we should have gotten a Halt"),
|
||||
Some(Command::Halt(expr)) => expr,
|
||||
Some(Command::Execute(expr, reply)) => call(
|
||||
lambda(0, seq([arg(0)], call(Replier { reply, builder: self }.to_expr().await, [arg(0)]))),
|
||||
lambda(0, [seq(
|
||||
[arg(0)],
|
||||
call(Replier { reply, builder: self }.to_gen().await, [arg(0)]),
|
||||
)]),
|
||||
[expr],
|
||||
),
|
||||
Some(Command::Register(expr, reply)) =>
|
||||
call(Replier { reply, builder: self }.to_expr().await, [expr]),
|
||||
call(Replier { reply, builder: self }.to_gen().await, [expr]),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -62,23 +63,13 @@ impl OwnedAtom for Replier {
|
||||
std::mem::drop(self.reply);
|
||||
self.builder.run().await
|
||||
}
|
||||
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
match &self.builder.0.name {
|
||||
None => "BuilderCoroutine".into(),
|
||||
Some(name) => format!("BuilderCoroutine({name})").into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn exec<R: ToExpr>(
|
||||
debug: impl AsRef<str>,
|
||||
f: impl for<'a> AsyncFnOnce(ExecHandle<'a>) -> R + 'static,
|
||||
) -> GExpr {
|
||||
pub async fn exec<R: ToExpr>(f: impl for<'a> AsyncFnOnce(ExecHandle<'a>) -> R + 'static) -> GExpr {
|
||||
let (cmd_snd, cmd_recv) = channel(0);
|
||||
let halt = async { Command::Halt(f(ExecHandle(cmd_snd, PhantomData)).await.to_expr().await) }
|
||||
let halt = async { Command::Halt(f(ExecHandle(cmd_snd, PhantomData)).await.to_gen().await) }
|
||||
.into_stream();
|
||||
let coro = BuilderCoroutine(Rc::new(BuilderCoroutineData {
|
||||
name: Some(debug.as_ref().to_string()),
|
||||
receiver: Mutex::new(stream::select(halt, cmd_recv).boxed_local()),
|
||||
}));
|
||||
coro.run().await
|
||||
@@ -90,12 +81,12 @@ pub struct ExecHandle<'a>(Sender<Command>, PhantomData<&'a ()>);
|
||||
impl ExecHandle<'_> {
|
||||
pub async fn exec<T: TryFromExpr>(&mut self, val: impl ToExpr) -> OrcRes<T> {
|
||||
let (reply_snd, mut reply_recv) = channel(1);
|
||||
self.0.send(Command::Execute(val.to_expr().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
|
||||
self.0.send(Command::Execute(val.to_gen().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
|
||||
T::try_from_expr(reply_recv.next().await.expect(WEIRD_DROP_ERR)).await
|
||||
}
|
||||
pub async fn register(&mut self, val: impl ToExpr) -> Expr {
|
||||
let (reply_snd, mut reply_recv) = channel(1);
|
||||
self.0.send(Command::Register(val.to_expr().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
|
||||
self.0.send(Command::Register(val.to_gen().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
|
||||
reply_recv.next().await.expect(WEIRD_DROP_ERR)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,10 +29,11 @@ use trait_set::trait_set;
|
||||
use crate::api;
|
||||
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId};
|
||||
use crate::atom_owned::take_atom;
|
||||
use crate::context::{SysCtx, ctx, i, with_ctx};
|
||||
use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
|
||||
use crate::lexer::{LexContext, ekey_cascade, ekey_not_applicable};
|
||||
use crate::parser::{PTokTree, ParsCtx, get_const, linev_into_api};
|
||||
use crate::system::{SysCtx, atom_by_idx};
|
||||
use crate::system::atom_by_idx;
|
||||
use crate::system_ctor::{CtedObj, DynSystemCtor};
|
||||
use crate::tree::{LazyMemberFactory, TreeIntoApiCtxImpl};
|
||||
|
||||
@@ -62,7 +63,6 @@ pub struct SystemRecord {
|
||||
trait_set! {
|
||||
pub trait WithAtomRecordCallback<'a, T> = AsyncFnOnce(
|
||||
Box<dyn AtomDynfo>,
|
||||
SysCtx,
|
||||
AtomTypeId,
|
||||
&'a [u8]
|
||||
) -> T
|
||||
@@ -78,7 +78,7 @@ pub async fn with_atom_record<'a, F: Future<Output = SysCtx>, T>(
|
||||
let inst = ctx.get::<CtedObj>().inst();
|
||||
let id = AtomTypeId::decode(Pin::new(&mut data)).await;
|
||||
let atom_record = atom_by_idx(inst.card(), id.clone()).expect("Atom ID reserved");
|
||||
cb(atom_record, ctx, id, data).await
|
||||
with_ctx(ctx, async move { cb(atom_record, id, data).await }).await
|
||||
}
|
||||
|
||||
pub struct ExtensionOwner {
|
||||
@@ -157,7 +157,8 @@ pub fn extension_init(
|
||||
clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger);
|
||||
async move {
|
||||
let interner_cell = interner_weak.upgrade().expect("Interner dropped before request");
|
||||
let i = interner_cell.borrow().clone().expect("Request arrived before interner set");
|
||||
let interner =
|
||||
interner_cell.borrow().clone().expect("Request arrived before interner set");
|
||||
if !matches!(req, api::HostExtReq::AtomReq(api::AtomReq::AtomPrint(_))) {
|
||||
writeln!(msg_logger, "{} extension received request {req:?}", data.name);
|
||||
}
|
||||
@@ -169,153 +170,156 @@ pub fn extension_init(
|
||||
}
|
||||
hand.handle(&sys_drop, &()).await
|
||||
},
|
||||
api::HostExtReq::AtomDrop(atom_drop @ api::AtomDrop(sys_id, atom)) => {
|
||||
let ctx = get_ctx(sys_id).await;
|
||||
take_atom(atom, &ctx).await.dyn_free(ctx.clone()).await;
|
||||
hand.handle(&atom_drop, &()).await
|
||||
},
|
||||
api::HostExtReq::AtomDrop(atom_drop @ api::AtomDrop(sys_id, atom)) =>
|
||||
with_ctx(get_ctx(sys_id).await, async move {
|
||||
take_atom(atom).await.dyn_free().await;
|
||||
hand.handle(&atom_drop, &()).await
|
||||
})
|
||||
.await,
|
||||
api::HostExtReq::Ping(ping @ api::Ping) => hand.handle(&ping, &()).await,
|
||||
api::HostExtReq::Sweep(sweep @ api::Sweep) =>
|
||||
hand.handle(&sweep, &i.sweep_replica().await).await,
|
||||
hand.handle(&sweep, &interner.sweep_replica().await).await,
|
||||
api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => {
|
||||
let (sys_id, _) = (decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system))
|
||||
.expect("NewSystem call received for invalid system");
|
||||
let cted = data.systems[sys_id].new_system(&new_sys);
|
||||
let lex_filter =
|
||||
cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
|
||||
char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned()))
|
||||
});
|
||||
let lazy_members = Mutex::new(HashMap::new());
|
||||
let ctx = init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await;
|
||||
let const_root = stream::iter(cted.inst().dyn_env())
|
||||
.then(|mem| {
|
||||
let lazy_mems = &lazy_members;
|
||||
clone!(i, ctx; async move {
|
||||
let name = i.i(&mem.name).await;
|
||||
let mut tia_ctx = TreeIntoApiCtxImpl {
|
||||
lazy_members: &mut *lazy_mems.lock().await,
|
||||
sys: ctx,
|
||||
basepath: &[],
|
||||
path: Substack::Bottom.push(name.clone()),
|
||||
};
|
||||
(name.to_api(), mem.kind.into_api(&mut tia_ctx).await)
|
||||
with_ctx(init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await, async move {
|
||||
let lex_filter =
|
||||
cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
|
||||
char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned()))
|
||||
});
|
||||
let lazy_members = Mutex::new(HashMap::new());
|
||||
let const_root = stream::iter(cted.inst().dyn_env().await)
|
||||
.then(|mem| {
|
||||
let lazy_mems = &lazy_members;
|
||||
async move {
|
||||
let name = i().i(&mem.name).await;
|
||||
let mut tia_ctx = TreeIntoApiCtxImpl {
|
||||
lazy_members: &mut *lazy_mems.lock().await,
|
||||
basepath: &[],
|
||||
path: Substack::Bottom.push(name.clone()),
|
||||
};
|
||||
(name.to_api(), mem.kind.into_api(&mut tia_ctx).await)
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
.collect()
|
||||
.await;
|
||||
let prelude =
|
||||
cted.inst().dyn_prelude().await.iter().map(|sym| sym.to_api()).collect();
|
||||
let record = SystemRecord { ctx: ctx(), lazy_members };
|
||||
let systems = systems_weak.upgrade().expect("System constructed during shutdown");
|
||||
systems.write().await.insert(new_sys.id, record);
|
||||
let line_types = join_all(
|
||||
(cted.inst().dyn_parsers().iter())
|
||||
.map(|p| async { interner.i(p.line_head()).await.to_api() }),
|
||||
)
|
||||
.await;
|
||||
let prelude =
|
||||
cted.inst().dyn_prelude(&i).await.iter().map(|sym| sym.to_api()).collect();
|
||||
let record = SystemRecord { ctx, lazy_members };
|
||||
let systems = systems_weak.upgrade().expect("System constructed during shutdown");
|
||||
systems.write().await.insert(new_sys.id, record);
|
||||
let line_types = join_all(
|
||||
(cted.inst().dyn_parsers().iter())
|
||||
.map(|p| async { i.i(p.line_head()).await.to_api() }),
|
||||
)
|
||||
.await;
|
||||
let response = api::NewSystemResponse { lex_filter, const_root, line_types, prelude };
|
||||
hand.handle(&new_sys, &response).await
|
||||
},
|
||||
api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => {
|
||||
let sys_ctx = get_ctx(sys_id).await;
|
||||
let systems = systems_weak.upgrade().expect("Member queried during shutdown");
|
||||
let systems_g = systems.read().await;
|
||||
let mut lazy_members =
|
||||
systems_g.get(&sys_id).expect("System not found").lazy_members.lock().await;
|
||||
let (path, cb) = match lazy_members.insert(tree_id, MemberRecord::Res) {
|
||||
None => panic!("Tree for ID not found"),
|
||||
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
|
||||
Some(MemberRecord::Gen(path, cb)) => (path, cb),
|
||||
};
|
||||
let tree = cb.build(Sym::new(path.clone(), &i).await.unwrap(), sys_ctx.clone()).await;
|
||||
let mut tia_ctx = TreeIntoApiCtxImpl {
|
||||
sys: sys_ctx,
|
||||
path: Substack::Bottom,
|
||||
basepath: &path,
|
||||
lazy_members: &mut lazy_members,
|
||||
};
|
||||
hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
|
||||
let response =
|
||||
api::NewSystemResponse { lex_filter, const_root, line_types, prelude };
|
||||
hand.handle(&new_sys, &response).await
|
||||
})
|
||||
.await
|
||||
},
|
||||
api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) =>
|
||||
with_ctx(get_ctx(sys_id).await, async move {
|
||||
let systems = systems_weak.upgrade().expect("Member queried during shutdown");
|
||||
let systems_g = systems.read().await;
|
||||
let mut lazy_members =
|
||||
systems_g.get(&sys_id).expect("System not found").lazy_members.lock().await;
|
||||
let (path, cb) = match lazy_members.insert(tree_id, MemberRecord::Res) {
|
||||
None => panic!("Tree for ID not found"),
|
||||
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
|
||||
Some(MemberRecord::Gen(path, cb)) => (path, cb),
|
||||
};
|
||||
let tree = cb.build(Sym::new(path.clone(), &interner).await.unwrap()).await;
|
||||
let mut tia_ctx = TreeIntoApiCtxImpl {
|
||||
path: Substack::Bottom,
|
||||
basepath: &path,
|
||||
lazy_members: &mut lazy_members,
|
||||
};
|
||||
hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
|
||||
})
|
||||
.await,
|
||||
api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
|
||||
let api::SysFwded(sys_id, payload) = fwd;
|
||||
let ctx = get_ctx(sys_id).await;
|
||||
let sys = ctx.cted().inst();
|
||||
sys.dyn_request(hand, payload).await
|
||||
with_ctx(ctx.clone(), async move {
|
||||
let sys = ctx.cted().inst();
|
||||
sys.dyn_request(hand, payload).await
|
||||
})
|
||||
.await
|
||||
},
|
||||
api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) => {
|
||||
let mut sys_ctx = get_ctx(sys).await;
|
||||
let text = Tok::from_api(text, &i).await;
|
||||
let src = Sym::from_api(src, sys_ctx.i()).await;
|
||||
let rep = Reporter::new();
|
||||
let expr_store = BorrowedExprStore::new();
|
||||
let trigger_char = text.chars().nth(pos as usize).unwrap();
|
||||
let ekey_na = ekey_not_applicable(&i).await;
|
||||
let ekey_cascade = ekey_cascade(&i).await;
|
||||
let lexers = sys_ctx.cted().inst().dyn_lexers();
|
||||
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) {
|
||||
let ctx = LexContext {
|
||||
id,
|
||||
pos,
|
||||
text: &text,
|
||||
src: src.clone(),
|
||||
ctx: sys_ctx.clone(),
|
||||
rep: &rep,
|
||||
exprs: &expr_store,
|
||||
};
|
||||
match lx.lex(&text[pos as usize..], &ctx).await {
|
||||
Err(e) if e.any(|e| *e == ekey_na) => continue,
|
||||
Err(e) => {
|
||||
let eopt = e.keep_only(|e| *e != ekey_cascade).map(|e| Err(e.to_api()));
|
||||
expr_store.dispose().await;
|
||||
return hand.handle(&lex, &eopt).await;
|
||||
},
|
||||
Ok((s, expr)) => {
|
||||
let expr = expr.into_api(&mut (), &mut sys_ctx).await;
|
||||
let pos = (text.len() - s.len()) as u32;
|
||||
expr_store.dispose().await;
|
||||
return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await;
|
||||
},
|
||||
api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) =>
|
||||
with_ctx(get_ctx(sys).await, async move {
|
||||
let text = Tok::from_api(text, &i()).await;
|
||||
let src = Sym::from_api(src, &i()).await;
|
||||
let rep = Reporter::new();
|
||||
let expr_store = BorrowedExprStore::new();
|
||||
let trigger_char = text.chars().nth(pos as usize).unwrap();
|
||||
let ekey_na = ekey_not_applicable().await;
|
||||
let ekey_cascade = ekey_cascade().await;
|
||||
let lexers = ctx().cted().inst().dyn_lexers();
|
||||
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char))
|
||||
{
|
||||
let ctx = LexContext::new(&expr_store, &text, id, pos, src.clone(), &rep);
|
||||
match lx.lex(&text[pos as usize..], &ctx).await {
|
||||
Err(e) if e.any(|e| *e == ekey_na) => continue,
|
||||
Err(e) => {
|
||||
let eopt = e.keep_only(|e| *e != ekey_cascade).map(|e| Err(e.to_api()));
|
||||
expr_store.dispose().await;
|
||||
return hand.handle(&lex, &eopt).await;
|
||||
},
|
||||
Ok((s, expr)) => {
|
||||
let expr = expr.into_api(&mut (), &mut ()).await;
|
||||
let pos = (text.len() - s.len()) as u32;
|
||||
expr_store.dispose().await;
|
||||
return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await;
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
writeln!(logger, "Got notified about n/a character '{trigger_char}'");
|
||||
expr_store.dispose().await;
|
||||
hand.handle(&lex, &None).await
|
||||
},
|
||||
writeln!(logger, "Got notified about n/a character '{trigger_char}'");
|
||||
expr_store.dispose().await;
|
||||
hand.handle(&lex, &None).await
|
||||
})
|
||||
.await,
|
||||
api::HostExtReq::ParseLine(pline) => {
|
||||
let api::ParseLine { module, src, exported, comments, sys, line, idx } = &pline;
|
||||
let ctx = get_ctx(*sys).await;
|
||||
let parsers = ctx.cted().inst().dyn_parsers();
|
||||
let src = Sym::from_api(*src, ctx.i()).await;
|
||||
let comments =
|
||||
join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &i))).await;
|
||||
let expr_store = BorrowedExprStore::new();
|
||||
let mut from_api_ctx = (ctx.clone(), &expr_store);
|
||||
let line: Vec<PTokTree> =
|
||||
ttv_from_api(line, &mut from_api_ctx, &mut (), &src, &i).await;
|
||||
let snip = Snippet::new(line.first().expect("Empty line"), &line);
|
||||
let parser = parsers[*idx as usize];
|
||||
let module = Sym::from_api(*module, ctx.i()).await;
|
||||
let reporter = Reporter::new();
|
||||
let pctx = ParsCtx::new(ctx.clone(), module, &reporter);
|
||||
let parse_res = parser.parse(pctx, *exported, comments, snip).await;
|
||||
let o_line = match reporter.merge(parse_res) {
|
||||
Err(e) => Err(e.to_api()),
|
||||
Ok(t) => Ok(linev_into_api(t, ctx.clone()).await),
|
||||
};
|
||||
mem::drop(line);
|
||||
expr_store.dispose().await;
|
||||
hand.handle(&pline, &o_line).await
|
||||
},
|
||||
api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst(sys, id)) => {
|
||||
let ctx = get_ctx(sys).await;
|
||||
let cnst = get_const(id, ctx.clone()).await;
|
||||
hand.handle(fpc, &cnst.api_return(ctx).await).await
|
||||
with_ctx(get_ctx(*sys).await, async {
|
||||
let parsers = ctx().cted().inst().dyn_parsers();
|
||||
let src = Sym::from_api(*src, &i()).await;
|
||||
let comments =
|
||||
join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &interner)))
|
||||
.await;
|
||||
let expr_store = BorrowedExprStore::new();
|
||||
let line: Vec<PTokTree> =
|
||||
ttv_from_api(line, &mut &expr_store, &mut (), &src, &i()).await;
|
||||
let snip = Snippet::new(line.first().expect("Empty line"), &line);
|
||||
let parser = parsers[*idx as usize];
|
||||
let module = Sym::from_api(*module, &i()).await;
|
||||
let reporter = Reporter::new();
|
||||
let pctx = ParsCtx::new(module, &reporter);
|
||||
let parse_res = parser.parse(pctx, *exported, comments, snip).await;
|
||||
let o_line = match reporter.merge(parse_res) {
|
||||
Err(e) => Err(e.to_api()),
|
||||
Ok(t) => Ok(linev_into_api(t).await),
|
||||
};
|
||||
mem::drop(line);
|
||||
expr_store.dispose().await;
|
||||
hand.handle(&pline, &o_line).await
|
||||
})
|
||||
.await
|
||||
},
|
||||
api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst(sys, id)) =>
|
||||
with_ctx(get_ctx(sys).await, async move {
|
||||
let cnst = get_const(id).await;
|
||||
hand.handle(fpc, &cnst.serialize().await).await
|
||||
})
|
||||
.await,
|
||||
api::HostExtReq::AtomReq(atom_req) => {
|
||||
let atom = atom_req.get_atom();
|
||||
let atom_req = atom_req.clone();
|
||||
with_atom_record(&get_ctx, atom, async move |nfo, ctx, id, buf| {
|
||||
let actx = AtomCtx(buf, atom.drop, ctx.clone());
|
||||
with_atom_record(&get_ctx, atom, async move |nfo, id, buf| {
|
||||
let actx = AtomCtx(buf, atom.drop);
|
||||
match &atom_req {
|
||||
api::AtomReq::SerializeAtom(ser) => {
|
||||
let mut buf = enc_vec(&id).await;
|
||||
@@ -334,7 +338,7 @@ pub fn extension_init(
|
||||
api::AtomReq::Fwded(fwded) => {
|
||||
let api::Fwded(_, key, payload) = &fwded;
|
||||
let mut reply = Vec::new();
|
||||
let key = Sym::from_api(*key, &i).await;
|
||||
let key = Sym::from_api(*key, &interner).await;
|
||||
let some = nfo
|
||||
.handle_req(
|
||||
actx,
|
||||
@@ -347,18 +351,18 @@ pub fn extension_init(
|
||||
},
|
||||
api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => {
|
||||
let expr_store = BorrowedExprStore::new();
|
||||
let expr_handle = ExprHandle::borrowed(ctx.clone(), *arg, &expr_store);
|
||||
let expr_handle = ExprHandle::borrowed(*arg, &expr_store);
|
||||
let ret = nfo.call_ref(actx, Expr::from_handle(expr_handle.clone())).await;
|
||||
let api_expr = ret.api_return(ctx.clone()).await;
|
||||
let api_expr = ret.serialize().await;
|
||||
mem::drop(expr_handle);
|
||||
expr_store.dispose().await;
|
||||
hand.handle(call, &api_expr).await
|
||||
},
|
||||
api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => {
|
||||
let expr_store = BorrowedExprStore::new();
|
||||
let expr_handle = ExprHandle::borrowed(ctx.clone(), *arg, &expr_store);
|
||||
let expr_handle = ExprHandle::borrowed(*arg, &expr_store);
|
||||
let ret = nfo.call(actx, Expr::from_handle(expr_handle.clone())).await;
|
||||
let api_expr = ret.api_return(ctx.clone()).await;
|
||||
let api_expr = ret.serialize().await;
|
||||
mem::drop(expr_handle);
|
||||
expr_store.dispose().await;
|
||||
hand.handle(call, &api_expr).await
|
||||
@@ -368,7 +372,7 @@ pub fn extension_init(
|
||||
Ok(opt) => match opt {
|
||||
None => hand.handle(cmd, &Ok(api::NextStep::Halt)).await,
|
||||
Some(cont) => {
|
||||
let cont = cont.api_return(ctx.clone()).await;
|
||||
let cont = cont.serialize().await;
|
||||
hand.handle(cmd, &Ok(api::NextStep::Continue(cont))).await
|
||||
},
|
||||
},
|
||||
@@ -383,12 +387,12 @@ pub fn extension_init(
|
||||
let ctx = get_ctx(*sys).await;
|
||||
// SAFETY: deserialization implicitly grants ownership to previously owned exprs
|
||||
let refs = (refs.iter())
|
||||
.map(|tk| Expr::from_handle(ExprHandle::deserialize(ctx.clone(), *tk)))
|
||||
.map(|tk| Expr::from_handle(ExprHandle::deserialize(*tk)))
|
||||
.collect_vec();
|
||||
let id = AtomTypeId::decode(Pin::new(&mut read)).await;
|
||||
let inst = ctx.cted().inst();
|
||||
let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID");
|
||||
hand.handle(&deser, &nfo.deserialize(ctx.clone(), read, &refs).await).await
|
||||
hand.handle(&deser, &nfo.deserialize(read, &refs).await).await
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,8 +13,8 @@ use orchid_base::reqnot::Requester;

use crate::api;
use crate::atom::ForeignAtom;
use crate::context::{ctx, i};
use crate::gen_expr::{GExpr, GExprKind};
use crate::system::SysCtx;

pub struct BorrowedExprStore(RefCell<Option<HashSet<Rc<ExprHandle>>>>);
impl BorrowedExprStore {
@@ -22,7 +22,7 @@ impl BorrowedExprStore {
pub async fn dispose(self) {
let elements = self.0.borrow_mut().take().unwrap();
for handle in elements {
handle.drop_one().await
handle.on_borrow_expire().await
}
}
}
@@ -34,58 +34,67 @@ impl Drop for BorrowedExprStore {
}
}

#[derive(destructure)]
pub struct ExprHandle {
pub tk: api::ExprTicket,
pub ctx: SysCtx,
}
#[derive(destructure, PartialEq, Eq, Hash)]
pub struct ExprHandle(api::ExprTicket);
impl ExprHandle {
/// This function does not signal to take ownership of the expr.
pub fn borrowed(ctx: SysCtx, tk: api::ExprTicket, store: &BorrowedExprStore) -> Rc<Self> {
let this = Rc::new(Self { ctx, tk });
/// Do not signal to take ownership of the expr. Instead, the
/// [BorrowedExprStore] signifies the lifetime of the borrow, and when it is
/// freed, it signals to take ownership of any exprs that ended up outliving
/// it. It is used to receive exprs sent via [ExprHandle::ticket] as an
/// optimization over [ExprHandle::from_ticket]
pub fn borrowed(tk: api::ExprTicket, store: &BorrowedExprStore) -> Rc<Self> {
let this = Rc::new(Self(tk));
store.0.borrow_mut().as_mut().unwrap().insert(this.clone());
this
}
pub fn deserialize(ctx: SysCtx, tk: api::ExprTicket) -> Rc<Self> { Rc::new(Self { ctx, tk }) }
pub fn get_ctx(&self) -> SysCtx { self.ctx.clone() }
/// Drop one instance of the handle silently; if it's the last one, do
/// nothing, otherwise send an Acquire
pub async fn drop_one(self: Rc<Self>) {
match Rc::try_unwrap(self) {
Err(rc) => rc.ctx.reqnot().notify(api::Acquire(rc.ctx.sys_id(), rc.tk)).await,
Ok(hand) => {
// avoid calling destructor
hand.destructure();
},
}
/// This function takes over the loose reference pre-created via
/// [ExprHandle::serialize] in the sender. It must therefore pair up with a
/// corresponding call to that function.
pub fn deserialize(tk: api::ExprTicket) -> Rc<Self> { Rc::new(Self(tk)) }
/// This function takes ownership of a borrowed expr sent via
/// [ExprHandle::ticket] and signals immediately to record that ownership. It
/// is used in place of [ExprHandle::borrowed] when it's impractical to
/// determine how long the borrow will live.
///
/// # Safety
///
/// You need to ensure that the [api::Acquire] sent by this function arrives
/// before the borrow expires, so you still need a borrow delimited by some
/// message you will send in the future.
pub async fn from_ticket(tk: api::ExprTicket) -> Rc<Self> {
let store = BorrowedExprStore::new();
let expr = Self::borrowed(tk, &store);
store.dispose().await;
expr
}
/// The raw ticket used in messages. If you want to transfer ownership via the
/// ticket, you should use [ExprHandle::serialize]. Only send this if you want
/// to lend the expr, and you expect the receiver to use
/// [ExprHandle::borrowed] or [ExprHandle::from_ticket]
pub fn ticket(&self) -> api::ExprTicket { self.0 }
async fn send_acq(&self) { ctx().reqnot().notify(api::Acquire(ctx().sys_id(), self.0)).await }
/// If this is the last one reference, do nothing, otherwise send an Acquire
pub async fn on_borrow_expire(self: Rc<Self>) { self.serialize().await; }
/// Drop the handle and get the ticket without a release notification.
/// Use this with messages that imply ownership transfer. This function is
/// safe because abusing it is a memory leak.
pub fn serialize(self) -> api::ExprTicket { self.destructure().0 }
}
impl Eq for ExprHandle {}
impl PartialEq for ExprHandle {
fn eq(&self, other: &Self) -> bool {
self.ctx.sys_id() == other.ctx.sys_id() && self.tk == other.tk
}
}
impl Hash for ExprHandle {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.ctx.sys_id().hash(state);
self.tk.hash(state);
pub async fn serialize(self: Rc<Self>) -> api::ExprTicket {
match Rc::try_unwrap(self) {
Err(rc) => {
rc.send_acq().await;
rc.0
},
Ok(hand) => hand.destructure().0,
}
}
}
impl fmt::Debug for ExprHandle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "ExprHandle({})", self.tk.0)
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ExprHandle({})", self.0.0) }
}
impl Drop for ExprHandle {
fn drop(&mut self) {
let notif = api::Release(self.ctx.sys_id(), self.tk);
let reqnot = self.ctx.reqnot().clone();
self.ctx.spawner()(Box::pin(async move { reqnot.notify(notif).await }))
let notif = api::Release(ctx().sys_id(), self.0);
ctx().spawn(async move { ctx().reqnot().clone().notify(notif).await })
}
}

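// A minimal usage sketch of the borrow protocol documented above; it is not part
// of this commit. It uses only signatures visible in the diff (BorrowedExprStore,
// ExprHandle::borrowed, Expr::from_handle, dispose); the function name and the
// `use_expr` callback are hypothetical.
async fn with_borrowed_expr(tk: api::ExprTicket, use_expr: impl AsyncFnOnce(Expr)) {
  // No Acquire is sent up front; the store delimits the borrow.
  let store = BorrowedExprStore::new();
  let handle = ExprHandle::borrowed(tk, &store);
  use_expr(Expr::from_handle(handle.clone())).await;
  mem::drop(handle);
  // Any handle that outlived the borrow triggers an Acquire when the store is
  // disposed, converting the borrow into ownership instead of dangling.
  store.dispose().await;
}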
@@ -96,19 +105,23 @@ pub struct Expr {
}
impl Expr {
pub fn from_handle(handle: Rc<ExprHandle>) -> Self { Self { handle, data: Rc::default() } }
pub fn new(handle: Rc<ExprHandle>, d: ExprData) -> Self {
pub fn from_data(handle: Rc<ExprHandle>, d: ExprData) -> Self {
Self { handle, data: Rc::new(OnceCell::from(d)) }
}

/// Creates an instance without incrementing the reference count. This is
/// only safe to be called on a reference created with an [Expr::serialize]
/// call which created the loose reference it can take ownership of.
pub async fn deserialize(tk: api::ExprTicket) -> Self {
Self::from_handle(ExprHandle::deserialize(tk))
}
pub async fn data(&self) -> &ExprData {
(self.data.get_or_init(async {
let details = self.handle.ctx.reqnot().request(api::Inspect { target: self.handle.tk }).await;
let pos = Pos::from_api(&details.location, self.handle.ctx.i()).await;
let details = ctx().reqnot().request(api::Inspect { target: self.handle.ticket() }).await;
let pos = Pos::from_api(&details.location, &i()).await;
let kind = match details.kind {
api::InspectedKind::Atom(a) =>
ExprKind::Atom(ForeignAtom::new(self.handle.clone(), a, pos.clone())),
api::InspectedKind::Bottom(b) =>
ExprKind::Bottom(OrcErrv::from_api(&b, self.handle.ctx.i()).await),
api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b, &i()).await),
api::InspectedKind::Opaque => ExprKind::Opaque,
};
ExprData { pos, kind }
@@ -122,20 +135,22 @@ impl Expr {
}
}
pub fn handle(&self) -> Rc<ExprHandle> { self.handle.clone() }
pub fn ctx(&self) -> SysCtx { self.handle.ctx.clone() }

pub fn slot(&self) -> GExpr {
GExpr { pos: Pos::SlotTarget, kind: GExprKind::Slot(self.clone()) }
}
/// Increments the refcount to ensure that the ticket remains valid even if
/// the handle is freed. To avoid a leak, [Expr::deserialize] must eventually
/// be called.
pub async fn serialize(self) -> api::ExprTicket { self.handle.serialize().await }
}
impl Format for Expr {
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
match &self.data().await.kind {
ExprKind::Opaque => "OPAQUE".to_string().into(),
ExprKind::Bottom(b) => format!("Bottom({b})").into(),
ExprKind::Atom(a) => FmtUnit::from_api(
&self.handle.ctx.reqnot().request(api::ExtAtomPrint(a.atom.clone())).await,
),
ExprKind::Atom(a) =>
FmtUnit::from_api(&ctx().reqnot().request(api::ExtAtomPrint(a.atom.clone())).await),
}
}
}

@@ -19,11 +19,11 @@ use trait_set::trait_set;

use crate::atom::Atomic;
use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use crate::context::{SysCtxEntry, ctx, i};
use crate::conv::ToExpr;
use crate::coroutine_exec::{ExecHandle, exec};
use crate::expr::Expr;
use crate::gen_expr::GExpr;
use crate::system::{SysCtx, SysCtxEntry};

trait_set! {
trait FunCB = Fn(Vec<Expr>) -> LocalBoxFuture<'static, OrcRes<GExpr>> + 'static;
@@ -43,14 +43,11 @@ struct FunRecord {
fun: Rc<dyn FunCB>,
}

fn process_args<I, O, F: ExprFunc<I, O>>(
debug: impl AsRef<str> + Clone + 'static,
f: F,
) -> FunRecord {
fn process_args<I, O, F: ExprFunc<I, O>>(f: F) -> FunRecord {
let argtyps = F::argtyps();
let fun = Rc::new(move |v: Vec<Expr>| {
clone!(f, v mut);
exec(debug.clone(), async move |mut hand| {
exec(async move |mut hand| {
let mut norm_args = Vec::with_capacity(v.len());
for (expr, typ) in v.into_iter().zip(argtyps) {
if *typ != TypeId::of::<Expr>() {
@@ -77,13 +74,14 @@ pub(crate) struct Fun {
record: FunRecord,
}
impl Fun {
pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, ctx: SysCtx, f: F) -> Self {
pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, f: F) -> Self {
let ctx = ctx();
let funs: &FunsCtx = ctx.get_or_default();
let mut fung = funs.0.lock().await;
let record = if let Some(record) = fung.get(&path) {
record.clone()
} else {
let record = process_args(path.to_string(), f);
let record = process_args(f);
fung.insert(path.clone(), record.clone());
record
};
@@ -101,20 +99,19 @@ impl OwnedAtom for Fun {
async fn call_ref(&self, arg: Expr) -> GExpr {
let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
if new_args.len() == self.record.argtyps.len() {
(self.record.fun)(new_args).await.to_expr().await
(self.record.fun)(new_args).await.to_gen().await
} else {
Self { args: new_args, record: self.record.clone(), path: self.path.clone() }.to_expr().await
Self { args: new_args, record: self.record.clone(), path: self.path.clone() }.to_gen().await
}
}
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.path.to_api().encode(write).await;
self.args.clone()
}
async fn deserialize(mut ctx: impl DeserializeCtx, args: Self::Refs) -> Self {
let sys = ctx.sys();
let path = Sym::from_api(ctx.decode().await, sys.i()).await;
let record = (sys.get::<FunsCtx>().0.lock().await.get(&path))
async fn deserialize(mut ds_cx: impl DeserializeCtx, args: Self::Refs) -> Self {
let path = Sym::from_api(ds_cx.decode().await, &i()).await;
let record = (ctx().get::<FunsCtx>().0.lock().await.get(&path))
.expect("Function missing during deserialization")
.clone();
Self { args, path, record }
@@ -134,8 +131,8 @@ pub struct Lambda {
record: FunRecord,
}
impl Lambda {
pub fn new<I, O, F: ExprFunc<I, O>>(debug: impl AsRef<str> + Clone + 'static, f: F) -> Self {
Self { args: vec![], record: process_args(debug, f) }
pub fn new<I, O, F: ExprFunc<I, O>>(f: F) -> Self {
Self { args: vec![], record: process_args(f) }
}
}
impl Atomic for Lambda {
@@ -148,9 +145,9 @@ impl OwnedAtom for Lambda {
async fn call_ref(&self, arg: Expr) -> GExpr {
let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
if new_args.len() == self.record.argtyps.len() {
(self.record.fun)(new_args).await.to_expr().await
(self.record.fun)(new_args).await.to_gen().await
} else {
Self { args: new_args, record: self.record.clone() }.to_expr().await
Self { args: new_args, record: self.record.clone() }.to_gen().await
}
}
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
@@ -182,7 +179,7 @@ mod expr_func_derives {
async fn apply<'a>(&self, _: ExecHandle<'a>, v: Vec<Expr>) -> OrcRes<GExpr> {
assert_eq!(v.len(), Self::argtyps().len(), "Arity mismatch");
let [$([< $t:lower >],)*] = v.try_into().unwrap_or_else(|_| panic!("Checked above"));
Ok(self($($t::try_from_expr([< $t:lower >]).await?,)*).await.to_expr().await)
Ok(self($($t::try_from_expr([< $t:lower >]).await?,)*).await.to_gen().await)
}
}
}

@@ -6,12 +6,13 @@ use orchid_base::error::{OrcErr, OrcErrv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::location::Pos;
use orchid_base::name::Sym;
use orchid_base::reqnot::Requester;
use orchid_base::{match_mapping, tl_cache};

use crate::api;
use crate::atom::{AtomFactory, ToAtom};
use crate::context::ctx;
use crate::expr::Expr;
use crate::system::SysCtx;

#[derive(Clone, Debug)]
pub struct GExpr {
@@ -19,29 +20,32 @@ pub struct GExpr {
pub pos: Pos,
}
impl GExpr {
pub async fn api_return(self, ctx: SysCtx) -> api::Expression {
/// Release notifications will not be sent for the slots. Use this with
/// messages that imply ownership transfer
pub async fn serialize(self) -> api::Expression {
if let GExprKind::Slot(ex) = self.kind {
let hand = ex.handle();
mem::drop(ex);
api::Expression {
location: api::Location::SlotTarget,
kind: match Rc::try_unwrap(hand) {
Ok(h) => api::ExpressionKind::Slot { tk: h.serialize(), by_value: true },
Err(rc) => api::ExpressionKind::Slot { tk: rc.tk, by_value: false },
},
// an instance is leaked here, we must take ownership of it when we receive this
kind: api::ExpressionKind::Slot(hand.serialize().await),
}
} else {
api::Expression {
location: api::Location::Inherit,
kind: self.kind.api_return(ctx).boxed_local().await,
kind: self.kind.serialize().boxed_local().await,
}
}
}
pub fn at(self, pos: Pos) -> Self { GExpr { pos, kind: self.kind } }
pub async fn create(self) -> Expr {
Expr::deserialize(ctx().reqnot().request(api::Create(self.serialize().await)).await).await
}
}
impl Format for GExpr {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
self.kind.print(c).await
self.kind.print(c).boxed_local().await
}
}

@@ -57,21 +61,21 @@ pub enum GExprKind {
Bottom(OrcErrv),
}
impl GExprKind {
pub async fn api_return(self, ctx: SysCtx) -> api::ExpressionKind {
pub async fn serialize(self) -> api::ExpressionKind {
match_mapping!(self, Self => api::ExpressionKind {
Call(
f => Box::new(f.api_return(ctx.clone()).await),
x => Box::new(x.api_return(ctx).await)
f => Box::new(f.serialize().await),
x => Box::new(x.serialize().await)
),
Seq(
a => Box::new(a.api_return(ctx.clone()).await),
b => Box::new(b.api_return(ctx).await)
a => Box::new(a.serialize().await),
b => Box::new(b.serialize().await)
),
Lambda(arg, body => Box::new(body.api_return(ctx).await)),
Lambda(arg, body => Box::new(body.serialize().await)),
Arg(arg),
Const(name.to_api()),
Bottom(err.to_api()),
NewAtom(fac.clone().build(ctx).await),
NewAtom(fac.clone().build().await),
} {
Self::Slot(_) => panic!("processed elsewhere")
})
@@ -118,7 +122,7 @@ pub fn seq(deps: impl IntoIterator<Item = GExpr>, val: GExpr) -> GExpr {

pub fn arg(n: u64) -> GExpr { inherit(GExprKind::Arg(n)) }

pub fn lambda(n: u64, b: GExpr) -> GExpr { inherit(GExprKind::Lambda(n, Box::new(b))) }
pub fn lambda(n: u64, [b]: [GExpr; 1]) -> GExpr { inherit(GExprKind::Lambda(n, Box::new(b))) }

pub fn call(f: GExpr, argv: impl IntoIterator<Item = GExpr>) -> GExpr {
(argv.into_iter()).fold(f, |f, x| inherit(GExprKind::Call(Box::new(f), Box::new(x))))

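// A short sketch (not from this commit) combining the constructors above with the
// new array-argument `lambda`; `target` is a hypothetical, already-resolved Sym.
async fn apply_once(target: Sym) -> Expr {
  // \0. target 0 — lambda arguments are addressed by plain u64 indices.
  let gexpr = lambda(0, [call(sym_ref(target), [arg(0)])]);
  // `create` serializes the generated tree and asks the host to instantiate it.
  gexpr.create().await
}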
@@ -12,38 +12,48 @@ use orchid_base::parse::ParseCtx;
use orchid_base::reqnot::Requester;

use crate::api;
use crate::context::{ctx, i};
use crate::expr::BorrowedExprStore;
use crate::parser::PTokTree;
use crate::system::SysCtx;
use crate::tree::GenTokTree;

pub async fn ekey_cascade(i: &Interner) -> Tok<String> {
i.i("An error cascading from a recursive call").await
pub async fn ekey_cascade() -> Tok<String> {
i().i("An error cascading from a recursive call").await
}
pub async fn ekey_not_applicable(i: &Interner) -> Tok<String> {
i.i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await
pub async fn ekey_not_applicable() -> Tok<String> {
i().i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await
}
const MSG_INTERNAL_ERROR: &str = "This error is a sentinel for the extension library.\
it should not be emitted by the extension.";

pub async fn err_cascade(i: &Interner) -> OrcErrv {
mk_errv(ekey_cascade(i).await, MSG_INTERNAL_ERROR, [Pos::None])
pub async fn err_cascade() -> OrcErrv {
mk_errv(ekey_cascade().await, MSG_INTERNAL_ERROR, [Pos::None])
}

pub async fn err_not_applicable(i: &Interner) -> OrcErrv {
mk_errv(ekey_not_applicable(i).await, MSG_INTERNAL_ERROR, [Pos::None])
pub async fn err_not_applicable() -> OrcErrv {
mk_errv(ekey_not_applicable().await, MSG_INTERNAL_ERROR, [Pos::None])
}

pub struct LexContext<'a> {
pub(crate) exprs: &'a BorrowedExprStore,
pub ctx: SysCtx,
pub text: &'a Tok<String>,
pub id: api::ParsId,
pub pos: u32,
i: Interner,
pub(crate) src: Sym,
pub(crate) rep: &'a Reporter,
}
impl<'a> LexContext<'a> {
pub fn new(
exprs: &'a BorrowedExprStore,
text: &'a Tok<String>,
id: api::ParsId,
pos: u32,
src: Sym,
rep: &'a Reporter,
) -> Self {
Self { exprs, i: i(), id, pos, rep, src, text }
}
pub fn src(&self) -> &Sym { &self.src }
/// This function returns [PTokTree] because it can never return
/// [orchid_base::tree::Token::NewExpr]. You can use
@@ -51,17 +61,10 @@ impl<'a> LexContext<'a> {
/// for embedding in the return value.
pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, PTokTree)> {
let start = self.pos(tail);
let Some(lx) = self.ctx.reqnot().request(api::SubLex { pos: start, id: self.id }).await else {
return Err(err_cascade(self.ctx.i()).await);
let Some(lx) = ctx().reqnot().request(api::SubLex { pos: start, id: self.id }).await else {
return Err(err_cascade().await);
};
let tree = PTokTree::from_api(
&lx.tree,
&mut (self.ctx.clone(), self.exprs),
&mut (),
&self.src,
self.ctx.i(),
)
.await;
let tree = PTokTree::from_api(&lx.tree, &mut { self.exprs }, &mut (), &self.src, &i()).await;
Ok((&self.text[lx.pos as usize..], tree))
}

@@ -75,7 +78,7 @@ impl<'a> LexContext<'a> {
}
}
impl ParseCtx for LexContext<'_> {
fn i(&self) -> &Interner { self.ctx.i() }
fn i(&self) -> &Interner { &self.i }
fn rep(&self) -> &Reporter { self.rep }
}

@@ -83,7 +86,7 @@ pub trait Lexer: Send + Sync + Sized + Default + 'static {
const CHAR_FILTER: &'static [RangeInclusive<char>];
fn lex<'a>(
tail: &'a str,
ctx: &'a LexContext<'a>,
lctx: &'a LexContext<'a>,
) -> impl Future<Output = OrcRes<(&'a str, GenTokTree)>>;
}


@@ -11,6 +11,7 @@ pub mod func_atom;
pub mod gen_expr;
pub mod lexer;
// pub mod msg;
pub mod context;
pub mod other_system;
pub mod parser;
pub mod reflection;

@@ -1,15 +1,12 @@
use std::marker::PhantomData;
use std::mem::size_of;

use crate::api;
use crate::system::{DynSystemCard, SystemCard};

pub struct SystemHandle<C: SystemCard> {
pub(crate) _card: PhantomData<C>,
pub(crate) card: C,
pub(crate) id: api::SysId,
}
impl<C: SystemCard> SystemHandle<C> {
pub(crate) fn new(id: api::SysId) -> Self { Self { _card: PhantomData, id } }
pub(crate) fn new(id: api::SysId) -> Self { Self { card: C::default(), id } }
pub fn id(&self) -> api::SysId { self.id }
}
impl<C: SystemCard> Clone for SystemHandle<C> {
@@ -21,16 +18,7 @@ pub trait DynSystemHandle {
fn get_card(&self) -> &dyn DynSystemCard;
}

pub fn leak_card<T: Default>() -> &'static T {
const {
if 0 != size_of::<T>() {
panic!("Attempted to leak positively sized Card. Card types must always be zero-sized");
}
}
Box::leak(Box::default())
}

impl<C: SystemCard> DynSystemHandle for SystemHandle<C> {
fn id(&self) -> api::SysId { self.id }
fn get_card(&self) -> &'static dyn DynSystemCard { leak_card::<C>() }
fn get_card(&self) -> &dyn DynSystemCard { &self.card }
}

@@ -16,10 +16,10 @@ use orchid_base::reqnot::Requester;
use orchid_base::tree::{TokTree, Token, ttv_into_api};

use crate::api;
use crate::context::{SysCtxEntry, ctx, i};
use crate::conv::ToExpr;
use crate::expr::Expr;
use crate::gen_expr::GExpr;
use crate::system::{SysCtx, SysCtxEntry};
use crate::tree::{GenTok, GenTokTree};

pub type PTok = Token<Expr, Never>;
@@ -81,19 +81,18 @@ pub type ParserObj = &'static dyn DynParser;

pub struct ParsCtx<'a> {
_parse: PhantomData<&'a mut ()>,
ctx: SysCtx,
module: Sym,
reporter: &'a Reporter,
i: Interner,
}
impl<'a> ParsCtx<'a> {
pub(crate) fn new(ctx: SysCtx, module: Sym, reporter: &'a Reporter) -> Self {
Self { _parse: PhantomData, ctx, module, reporter }
pub(crate) fn new(module: Sym, reporter: &'a Reporter) -> Self {
Self { _parse: PhantomData, module, reporter, i: i() }
}
pub fn ctx(&self) -> &SysCtx { &self.ctx }
pub fn module(&self) -> Sym { self.module.clone() }
}
impl ParseCtx for ParsCtx<'_> {
fn i(&self) -> &Interner { self.ctx.i() }
fn i(&self) -> &Interner { &self.i }
fn rep(&self) -> &Reporter { self.reporter }
}

@@ -118,7 +117,7 @@ impl ParsedLine {
name: Tok<String>,
f: F,
) -> Self {
let cb = Box::new(|ctx| async move { f(ctx).await.to_expr().await }.boxed_local());
let cb = Box::new(|ctx| async move { f(ctx).await.to_gen().await }.boxed_local());
let kind = ParsedLineKind::Mem(ParsedMem { name, exported, kind: ParsedMemKind::Const(cb) });
let comments = comments.into_iter().cloned().collect();
ParsedLine { comments, sr: sr.clone(), kind }
@@ -136,7 +135,7 @@ impl ParsedLine {
let comments = comments.into_iter().cloned().collect();
ParsedLine { comments, sr: sr.clone(), kind: line_kind }
}
pub async fn into_api(self, mut ctx: SysCtx) -> api::ParsedLine {
pub async fn into_api(self) -> api::ParsedLine {
api::ParsedLine {
comments: self.comments.into_iter().map(|c| c.to_api()).collect(),
source_range: self.sr.to_api(),
@@ -146,23 +145,23 @@ impl ParsedLine {
exported: mem.exported,
kind: match mem.kind {
ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
ctx.get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
ctx().get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
)),
ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module {
lines: linev_into_api(lines, ctx).boxed_local().await,
lines: linev_into_api(lines).boxed_local().await,
use_prelude,
},
},
}),
ParsedLineKind::Rec(tv) =>
api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut ctx).await),
api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut ()).await),
},
}
}
}

pub(crate) async fn linev_into_api(v: Vec<ParsedLine>, ctx: SysCtx) -> Vec<api::ParsedLine> {
join_all(v.into_iter().map(|l| l.into_api(ctx.clone()))).await
pub(crate) async fn linev_into_api(v: Vec<ParsedLine>) -> Vec<api::ParsedLine> {
join_all(v.into_iter().map(|l| l.into_api())).await
}

pub enum ParsedLineKind {
@@ -183,26 +182,23 @@ pub enum ParsedMemKind {

#[derive(Clone)]
pub struct ConstCtx {
ctx: SysCtx,
constid: api::ParsedConstId,
}
impl ConstCtx {
pub fn ctx(&self) -> &SysCtx { &self.ctx }
pub fn i(&self) -> &Interner { self.ctx.i() }
pub fn names<'b>(
&'b self,
names: impl IntoIterator<Item = &'b Sym> + 'b,
) -> impl Stream<Item = OrcRes<Sym>> + 'b {
let resolve_names = api::ResolveNames {
constid: self.constid,
sys: self.ctx.sys_id(),
sys: ctx().sys_id(),
names: names.into_iter().map(|n| n.to_api()).collect_vec(),
};
stream(async |mut cx| {
for name_opt in self.ctx.reqnot().request(resolve_names).await {
for name_opt in ctx().reqnot().request(resolve_names).await {
cx.emit(match name_opt {
Err(e) => Err(OrcErrv::from_api(&e, self.ctx.i()).await),
Ok(name) => Ok(Sym::from_api(name, self.ctx.i()).await),
Err(e) => Err(OrcErrv::from_api(&e, &i()).await),
Ok(name) => Ok(Sym::from_api(name, &i()).await),
})
.await
}
@@ -213,9 +209,9 @@ impl ConstCtx {
}
}

pub(crate) async fn get_const(id: api::ParsedConstId, ctx: SysCtx) -> GExpr {
let ent = ctx.get_or_default::<ParsedConstCtxEntry>();
let rec = ent.consts.get(id.0).expect("Bad ID or double read of parsed const");
let ctx = ConstCtx { constid: id, ctx: ctx.clone() };
rec.remove()(ctx).await
pub(crate) async fn get_const(id: api::ParsedConstId) -> GExpr {
let cb = (ctx().get_or_default::<ParsedConstCtxEntry>().consts.get(id.0))
.expect("Bad ID or double read of parsed const")
.remove();
cb(ConstCtx { constid: id }).await
}

@@ -9,7 +9,7 @@ use orchid_base::name::{NameLike, VPath};
use orchid_base::reqnot::Requester;

use crate::api;
use crate::system::{SysCtx, SysCtxEntry, WeakSysCtx};
use crate::context::{SysCtxEntry, ctx, i};

#[derive(Debug)]
pub struct ReflMemData {
@@ -33,37 +33,33 @@ pub enum ReflMemKind {
pub struct ReflModData {
inferred: Mutex<bool>,
path: VPath,
ctx: WeakSysCtx,
members: MemoMap<Tok<String>, ReflMem>,
}

#[derive(Clone, Debug)]
pub struct ReflMod(Rc<ReflModData>);
impl ReflMod {
fn ctx(&self) -> SysCtx {
self.0.ctx.upgrade().expect("ReflectedModule accessed after context drop")
}
pub fn path(&self) -> &[Tok<String>] { &self.0.path[..] }
pub fn is_root(&self) -> bool { self.0.path.is_empty() }
async fn try_populate(&self) -> Result<(), api::LsModuleError> {
let ctx = self.ctx();
let path_tok = ctx.i().i(&self.0.path[..]).await;
let reply = match ctx.reqnot().request(api::LsModule(ctx.sys_id(), path_tok.to_api())).await {
let path_tok = i().i(&self.0.path[..]).await;
let reply = match ctx().reqnot().request(api::LsModule(ctx().sys_id(), path_tok.to_api())).await
{
Err(api::LsModuleError::TreeUnavailable) =>
panic!("Reflected tree accessed outside an interpreter call. This extension is faulty."),
Err(err) => return Err(err),
Ok(details) => details,
};
for (k, v) in reply.members {
let k = ctx.i().ex(k).await;
let k = i().ex(k).await;
let mem = match self.0.members.get(&k) {
Some(mem) => mem,
None => {
let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym(ctx.i()).await;
let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym(&i()).await;
let kind = match v.kind {
api::MemberInfoKind::Constant => ReflMemKind::Const,
api::MemberInfoKind::Module =>
ReflMemKind::Mod(default_module(&ctx, VPath::new(path.segs()))),
ReflMemKind::Mod(default_module(VPath::new(path.segs()))),
};
self.0.members.get_or_insert(&k, || default_member(self.is_root(), kind))
},
@@ -91,7 +87,6 @@ impl ReflMod {
self.0.members.get(key).cloned()
}
pub async fn get_by_path(&self, path: &[Tok<String>]) -> Result<ReflMem, InvalidPathError> {
let ctx = self.ctx();
let (next, tail) = path.split_first().expect("Attempted to walk by empty path");
let inferred_g = self.0.inferred.lock().await;
if let Some(next) = self.0.members.get(next) {
@@ -107,7 +102,7 @@ impl ReflMod {
if !*inferred_g {
return Err(InvalidPathError { keep_ancestry: true });
}
let candidate = default_module(&ctx, self.0.path.clone().suffix([next.clone()]));
let candidate = default_module(self.0.path.clone().suffix([next.clone()]));
if tail.is_empty() {
return match candidate.try_populate().await {
Ok(()) => {
@@ -135,6 +130,7 @@ impl ReflMod {
}
}

#[derive(Clone)]
struct ReflRoot(ReflMod);
impl SysCtxEntry for ReflRoot {}

@@ -143,13 +139,8 @@ pub struct InvalidPathError {
keep_ancestry: bool,
}

fn default_module(ctx: &SysCtx, path: VPath) -> ReflMod {
ReflMod(Rc::new(ReflModData {
ctx: ctx.downgrade(),
inferred: Mutex::new(true),
path,
members: MemoMap::new(),
}))
fn default_module(path: VPath) -> ReflMod {
ReflMod(Rc::new(ReflModData { inferred: Mutex::new(true), path, members: MemoMap::new() }))
}

fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
@@ -159,8 +150,8 @@ fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
}))
}

fn get_root(ctx: &SysCtx) -> &ReflRoot {
ctx.get_or_insert(|| ReflRoot(default_module(ctx, VPath::new([]))))
fn get_root() -> ReflRoot {
ctx().get_or_insert(|| ReflRoot(default_module(VPath::new([])))).clone()
}

pub fn refl(ctx: &SysCtx) -> ReflMod { get_root(ctx).0.clone() }
pub fn refl() -> ReflMod { get_root().0.clone() }

@@ -1,22 +1,18 @@
use std::any::{Any, TypeId, type_name};
use std::fmt;
use std::any::{Any, TypeId};
use std::future::Future;
use std::num::NonZero;
use std::pin::Pin;
use std::rc::{Rc, Weak};

use futures::FutureExt;
use futures::future::LocalBoxFuture;
use memo_map::MemoMap;
use orchid_api_traits::{Coding, Decode};
use orchid_api_traits::{Coding, Decode, Encode, Request};
use orchid_base::boxed_iter::BoxedIter;
use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner;
use orchid_base::logging::Logger;
use orchid_base::name::Sym;
use orchid_base::reqnot::{Receipt, ReqNot};
use orchid_base::reqnot::{Receipt, Requester};

use crate::api;
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TAtom, get_info};
use crate::context::ctx;
use crate::coroutine_exec::Replier;
use crate::entrypoint::ExtReq;
use crate::func_atom::{Fun, Lambda};
@@ -32,7 +28,7 @@ pub trait SystemCard: Default + Send + Sync + 'static {
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>;
}

pub trait DynSystemCard: Send + Sync + 'static {
pub trait DynSystemCard: Send + Sync + Any + 'static {
fn name(&self) -> &'static str;
/// Atoms explicitly defined by the system card. Do not rely on this for
/// querying atoms as it doesn't include the general atom types
@@ -84,16 +80,16 @@ impl<T: SystemCard> DynSystemCard for T {

/// System as defined by author
pub trait System: Send + Sync + SystemCard + 'static {
fn prelude(i: &Interner) -> impl Future<Output = Vec<Sym>>;
fn env() -> Vec<GenMember>;
fn prelude() -> impl Future<Output = Vec<Sym>>;
fn env() -> impl Future<Output = Vec<GenMember>>;
fn lexers() -> Vec<LexerObj>;
fn parsers() -> Vec<ParserObj>;
fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>;
}

pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>>;
fn dyn_env(&'_ self) -> Vec<GenMember>;
fn dyn_prelude(&self) -> LocalBoxFuture<'_, Vec<Sym>>;
fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>>;
fn dyn_lexers(&self) -> Vec<LexerObj>;
fn dyn_parsers(&self) -> Vec<ParserObj>;
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>;
@@ -101,10 +97,8 @@ pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
}

impl<T: System> DynSystem for T {
fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>> {
Box::pin(Self::prelude(i))
}
fn dyn_env(&'_ self) -> Vec<GenMember> { Self::env() }
fn dyn_prelude(&self) -> LocalBoxFuture<'_, Vec<Sym>> { Box::pin(Self::prelude()) }
fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>> { Self::env().boxed_local() }
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> {
@@ -118,7 +112,7 @@ impl<T: System> DynSystem for T {
pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TAtom<A>, ForeignAtom>
where A: AtomicFeatures {
let mut data = &foreign.atom.data.0[..];
let ctx = foreign.ctx().clone();
let ctx = ctx();
let value = AtomTypeId::decode(Pin::new(&mut data)).await;
let own_inst = ctx.get::<CtedObj>().inst();
let owner = if *ctx.get::<api::SysId>() == foreign.atom.owner {
@@ -135,73 +129,23 @@ where A: AtomicFeatures {
if value != typ_id {
return Err(foreign);
}
let val = dynfo.decode(AtomCtx(data, foreign.atom.drop, ctx)).await;
let val = dynfo.decode(AtomCtx(data, foreign.atom.drop)).await;
let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type");
Ok(TAtom { value, untyped: foreign })
}

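// Sketch (not part of the commit): narrowing a ForeignAtom to a typed atom via the
// `downcast_atom` signature shown above; `MyAtom` stands in for any type that
// implements AtomicFeatures.
async fn expect_my_atom(foreign: ForeignAtom) -> Option<TAtom<MyAtom>> {
  match downcast_atom::<MyAtom>(foreign).await {
    Ok(typed) => Some(typed),
    // On a type mismatch the original ForeignAtom is handed back unchanged.
    Err(_other) => None,
  }
}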
#[derive(Clone)]
pub struct WeakSysCtx(Weak<MemoMap<TypeId, Box<dyn Any>>>);
impl WeakSysCtx {
pub fn upgrade(&self) -> Option<SysCtx> { Some(SysCtx(self.0.upgrade()?)) }
pub async fn dep_req<Sys: SystemCard, Req: Request + Into<Sys::Req>>(req: Req) -> Req::Response {
let ctx = ctx();
let mut msg = Vec::new();
req.into().encode(std::pin::pin!(&mut msg)).await;
let own_inst = ctx.get::<CtedObj>().inst();
let owner = if own_inst.card().type_id() == TypeId::of::<Sys>() {
ctx.sys_id()
} else {
(ctx.get::<CtedObj>().deps().find(|s| s.get_card().type_id() == TypeId::of::<Sys>()))
.expect("System not in dependency array")
.id()
};
let reply = ctx.reqnot().request(api::SysFwd(owner, msg)).await;
Req::Response::decode(std::pin::pin!(&reply[..])).await
}
impl fmt::Debug for WeakSysCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "WeakSysCtx") }
}

#[derive(Clone)]
pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
impl SysCtx {
pub fn new(
id: api::SysId,
i: Interner,
reqnot: ReqNot<api::ExtMsgSet>,
spawner: Spawner,
logger: Logger,
cted: CtedObj,
) -> Self {
let this = Self(Rc::new(MemoMap::new()));
this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted);
this
}
pub fn downgrade(&self) -> WeakSysCtx { WeakSysCtx(Rc::downgrade(&self.0)) }
pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self {
assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists");
self
}
pub fn get_or_insert<T: SysCtxEntry>(&self, f: impl FnOnce() -> T) -> &T {
(self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref())
.expect("Keyed by TypeId")
}
pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { self.get_or_insert(T::default) }
pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> {
Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId"))
}
pub fn get<T: SysCtxEntry>(&self) -> &T {
self.try_get().unwrap_or_else(|| panic!("Context {} missing", type_name::<T>()))
}
/// Shorthand to get the [Interner] instance
pub fn i(&self) -> &Interner { self.get::<Interner>() }
/// Shorthand to get the messaging link
pub fn reqnot(&self) -> &ReqNot<api::ExtMsgSet> { self.get::<ReqNot<api::ExtMsgSet>>() }
/// Shorthand to get the system ID
pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() }
/// Shorthand to get the task spawner callback
pub fn spawner(&self) -> &Spawner { self.get::<Spawner>() }
/// Shorthand to get the logger
pub fn logger(&self) -> &Logger { self.get::<Logger>() }
/// Shorthand to get the constructed system object
pub fn cted(&self) -> &CtedObj { self.get::<CtedObj>() }
}
impl fmt::Debug for SysCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "SysCtx({:?})", self.sys_id())
}
}
pub trait SysCtxEntry: 'static + Sized {}
impl SysCtxEntry for api::SysId {}
impl SysCtxEntry for ReqNot<api::ExtMsgSet> {}
impl SysCtxEntry for Spawner {}
impl SysCtxEntry for CtedObj {}
impl SysCtxEntry for Logger {}
impl SysCtxEntry for Interner {}

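// Sketch (not part of the commit) of the task-local context pattern the crate moves
// to: per-system state is keyed by type and fetched through `ctx()`, the interner
// through `i()`. `HitCounter` is a hypothetical entry, and the new `context` module
// is assumed to keep the SysCtx accessors shown above.
#[derive(Default)]
struct HitCounter(std::cell::Cell<u64>);
impl SysCtxEntry for HitCounter {}

async fn record_hit(name: &str) -> Tok<String> {
  let ctx = ctx();
  // get_or_default lazily initializes the entry on first access.
  let counter: &HitCounter = ctx.get_or_default();
  counter.0.set(counter.0.get() + 1);
  i().i(name).await
}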
@@ -62,6 +62,8 @@ pub trait SystemCtor: Send + Sync + 'static {
type Instance: System;
const NAME: &'static str;
const VERSION: f64;
/// Create a system instance. When this function is called, a context object
/// isn't yet available
fn inst(deps: <Self::Deps as DepDef>::Sat) -> Self::Instance;
}


@@ -14,19 +14,19 @@ use substack::Substack;
use trait_set::trait_set;

use crate::api;
use crate::context::i;
use crate::conv::ToExpr;
use crate::entrypoint::MemberRecord;
use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
use crate::func_atom::{ExprFunc, Fun};
use crate::gen_expr::{GExpr, sym_ref};
use crate::system::SysCtx;

pub type GenTokTree = TokTree<Expr, GExpr>;
pub type GenTok = Token<Expr, GExpr>;

impl TokenVariant<api::Expression> for GExpr {
type FromApiCtx<'a> = ();
type ToApiCtx<'a> = SysCtx;
type ToApiCtx<'a> = ();
async fn from_api(
_: &api::Expression,
_: &mut Self::FromApiCtx<'_>,
@@ -35,33 +35,31 @@ impl TokenVariant<api::Expression> for GExpr {
) -> Self {
panic!("Received new expression from host")
}
async fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> api::Expression {
self.api_return(ctx.clone()).await
}
async fn into_api(self, _: &mut Self::ToApiCtx<'_>) -> api::Expression { self.serialize().await }
}

impl TokenVariant<api::ExprTicket> for Expr {
type FromApiCtx<'a> = (SysCtx, &'a BorrowedExprStore);
type FromApiCtx<'a> = &'a BorrowedExprStore;
async fn from_api(
api: &api::ExprTicket,
(ctx, exprs): &mut Self::FromApiCtx<'_>,
exprs: &mut Self::FromApiCtx<'_>,
_: SrcRange,
_: &Interner,
) -> Self {
// SAFETY: receiving trees from sublexers implies borrowing
Expr::from_handle(ExprHandle::borrowed(ctx.clone(), *api, exprs))
Expr::from_handle(ExprHandle::borrowed(*api, exprs))
}
type ToApiCtx<'a> = ();
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::ExprTicket { self.handle().tk }
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::ExprTicket { self.handle().ticket() }
}

pub async fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_expr().await) }
pub async fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_gen().await) }
pub async fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) }

pub fn lazy(
public: bool,
name: &str,
cb: impl AsyncFnOnce(Sym, SysCtx) -> MemKind + Clone + 'static,
cb: impl AsyncFnOnce(Sym) -> MemKind + Clone + 'static,
) -> Vec<GenMember> {
vec![GenMember {
name: name.to_string(),
@@ -71,7 +69,7 @@ pub fn lazy(
}]
}
pub fn cnst(public: bool, name: &str, value: impl ToExpr + Clone + 'static) -> Vec<GenMember> {
lazy(public, name, async |_, _| MemKind::Const(value.to_expr().await))
lazy(public, name, async |_| MemKind::Const(value.to_gen().await))
}
pub fn module(
public: bool,
@@ -86,8 +84,8 @@ pub fn root_mod(name: &str, mems: impl IntoIterator<Item = Vec<GenMember>>) -> (
(name.to_string(), kind)
}
pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> {
let fac = LazyMemberFactory::new(async move |sym, ctx| {
MemKind::Const(Fun::new(sym, ctx, xf).await.to_expr().await)
let fac = LazyMemberFactory::new(async move |sym| {
MemKind::Const(Fun::new(sym, xf).await.to_gen().await)
});
vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }]
}
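// Sketch (not from this commit) of how a system's `env()` might assemble members
// with the helpers above. The closure, the constant value and the assumption that
// `u64` satisfies the ToExpr / ExprFunc conversion traits are all hypothetical.
async fn env() -> Vec<GenMember> {
  merge_trivial([
    cnst(true, "answer", 42u64),
    fun(true, "increment", async |x: u64| x + 1),
  ])
}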
@@ -149,14 +147,14 @@ pub fn merge_trivial(trees: impl IntoIterator<Item = Vec<GenMember>>) -> Vec<Gen

trait_set! {
trait LazyMemberCallback =
FnOnce(Sym, SysCtx) -> LocalBoxFuture<'static, MemKind> + DynClone
FnOnce(Sym) -> LocalBoxFuture<'static, MemKind> + DynClone
}
pub struct LazyMemberFactory(Box<dyn LazyMemberCallback>);
impl LazyMemberFactory {
pub fn new(cb: impl AsyncFnOnce(Sym, SysCtx) -> MemKind + Clone + 'static) -> Self {
Self(Box::new(|s, ctx| cb(s, ctx).boxed_local()))
pub fn new(cb: impl AsyncFnOnce(Sym) -> MemKind + Clone + 'static) -> Self {
Self(Box::new(|s| cb(s).boxed_local()))
}
pub async fn build(self, path: Sym, ctx: SysCtx) -> MemKind { (self.0)(path, ctx).await }
pub async fn build(self, path: Sym) -> MemKind { (self.0)(path).await }
}
impl Clone for LazyMemberFactory {
fn clone(&self) -> Self { Self(clone_box(&*self.0)) }
@@ -169,11 +167,10 @@ pub struct GenMember {
pub comments: Vec<String>,
}
impl GenMember {
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Member {
let name = ctx.sys().i().i::<String>(&self.name).await;
let kind = self.kind.into_api(&mut ctx.push_path(name.clone())).await;
let comments =
join_all(self.comments.iter().map(async |cmt| ctx.sys().i().i(cmt).await.to_api())).await;
pub async fn into_api(self, tia_cx: &mut impl TreeIntoApiCtx) -> api::Member {
let name = i().i::<String>(&self.name).await;
let kind = self.kind.into_api(&mut tia_cx.push_path(name.clone())).await;
let comments = join_all(self.comments.iter().map(async |cmt| i().i(cmt).await.to_api())).await;
api::Member { kind, name: name.to_api(), comments, exported: self.public }
}
}
@@ -187,7 +184,7 @@ impl MemKind {
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
match self {
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)),
Self::Const(c) => api::MemberKind::Const(c.api_return(ctx.sys()).await),
Self::Const(c) => api::MemberKind::Const(c.serialize().await),
Self::Mod { members } => api::MemberKind::Module(api::Module {
members: stream(async |mut cx| {
for m in members {
@@ -203,24 +200,20 @@ impl MemKind {
}

pub trait TreeIntoApiCtx {
fn sys(&self) -> SysCtx;
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId;
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx;
}

pub struct TreeIntoApiCtxImpl<'a, 'b> {
pub sys: SysCtx,
pub basepath: &'a [Tok<String>],
pub path: Substack<'a, Tok<String>>,
pub lazy_members: &'b mut HashMap<api::TreeId, MemberRecord>,
}

impl TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_> {
fn sys(&self) -> SysCtx { self.sys.clone() }
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx {
TreeIntoApiCtxImpl {
lazy_members: self.lazy_members,
sys: self.sys.clone(),
basepath: self.basepath,
path: self.path.push(seg),
}

@@ -10,7 +10,7 @@ use orchid_base::tree::AtomRepr;

use crate::api;
use crate::ctx::Ctx;
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
use crate::expr::{Expr, PathSetBuilder};
use crate::extension::Extension;
use crate::system::System;

@@ -58,15 +58,15 @@ impl AtomHand {
#[must_use]
pub async fn call(self, arg: Expr) -> Expr {
let owner_sys = self.0.owner.clone();
let ctx = owner_sys.ctx();
let reqnot = owner_sys.reqnot();
owner_sys.ext().exprs().give_expr(arg.clone());
ctx.exprs.give_expr(arg.clone());
let ret = match Rc::try_unwrap(self.0) {
Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await,
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
};
let mut parse_ctx = ExprParseCtx { ctx: owner_sys.ctx(), exprs: owner_sys.ext().exprs() };
let val = Expr::from_api(&ret, PathSetBuilder::new(), &mut parse_ctx).await;
owner_sys.ext().exprs().take_expr(arg.id());
let val = Expr::from_api(&ret, PathSetBuilder::new(), ctx.clone()).await;
ctx.exprs.take_expr(arg.id());
val
}
#[must_use]

@@ -18,7 +18,7 @@ pub struct CtxData {
pub spawn: Spawner,
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
pub system_id: RefCell<NonZeroU16>,
pub common_exprs: ExprStore,
pub exprs: ExprStore,
pub root: RwLock<WeakRoot>,
}
#[derive(Clone)]
@@ -43,7 +43,7 @@ impl Ctx {
i: Interner::default(),
systems: RwLock::default(),
system_id: RefCell::new(NonZero::new(1).unwrap()),
common_exprs: ExprStore::default(),
exprs: ExprStore::default(),
root: RwLock::default(),
}))
}

@@ -21,12 +21,6 @@ use crate::atom::AtomHand;
use crate::ctx::Ctx;
use crate::expr_store::ExprStore;

#[derive(Clone)]
pub struct ExprParseCtx<'a> {
pub ctx: &'a Ctx,
pub exprs: &'a ExprStore,
}

#[derive(Debug)]
pub struct ExprData {
pos: Pos,
@@ -61,42 +55,34 @@ impl Expr {
)
}
#[must_use]
pub async fn from_api(
api: &api::Expression,
psb: PathSetBuilder<'_, u64>,
ctx: &mut ExprParseCtx<'_>,
) -> Self {
let pos = Pos::from_api(&api.location, &ctx.ctx.i).await;
pub async fn from_api(api: &api::Expression, psb: PathSetBuilder<'_, u64>, ctx: Ctx) -> Self {
let pos = Pos::from_api(&api.location, &ctx.i).await;
let kind = match &api.kind {
api::ExpressionKind::Arg(n) => {
assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda");
ExprKind::Arg
},
api::ExpressionKind::Bottom(bot) =>
ExprKind::Bottom(OrcErrv::from_api(bot, &ctx.ctx.i).await),
api::ExpressionKind::Bottom(bot) => ExprKind::Bottom(OrcErrv::from_api(bot, &ctx.i).await),
api::ExpressionKind::Call(f, x) => {
let (lpsb, rpsb) = psb.split();
ExprKind::Call(
Expr::from_api(f, lpsb, ctx).boxed_local().await,
Expr::from_api(f, lpsb, ctx.clone()).boxed_local().await,
Expr::from_api(x, rpsb, ctx).boxed_local().await,
)
},
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.ctx.i).await),
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.i).await),
api::ExpressionKind::Lambda(x, body) => {
let lbuilder = psb.lambda(x);
let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await;
ExprKind::Lambda(lbuilder.collect(), body)
},
api::ExpressionKind::NewAtom(a) =>
ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.ctx.clone()).await),
api::ExpressionKind::Slot { tk, by_value: false } =>
return ctx.exprs.get_expr(*tk).expect("Invalid slot"),
api::ExpressionKind::Slot { tk, by_value: true } =>
return ctx.exprs.take_expr(*tk).expect("Invalid slot"),
ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.clone()).await),
api::ExpressionKind::Slot(tk) => return ctx.exprs.take_expr(*tk).expect("Invalid slot"),
api::ExpressionKind::Seq(a, b) => {
let (apsb, bpsb) = psb.split();
ExprKind::Seq(
Expr::from_api(a, apsb, ctx).boxed_local().await,
Expr::from_api(a, apsb, ctx.clone()).boxed_local().await,
Expr::from_api(b, bpsb, ctx).boxed_local().await,
)
},
@@ -169,8 +155,8 @@ async fn print_exprkind<'a>(
ExprKind::Bottom(e) if e.len() == 1 => format!("Bottom({e})").into(),
ExprKind::Bottom(e) => format!("Bottom(\n\t{}\n)", indent(&e.to_string())).into(),
ExprKind::Call(f, x) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
.unbounded("{0} {1l}")
.bounded("({0} {1b})")))
.unbounded("{0b} {1l}")
.bounded("({0b} {1})")))
.units([print_expr(f, c, visited).await, print_expr(x, c, visited).await]),
ExprKind::Identity(id) =>
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{{{0}}}"))).units([print_expr(
@@ -180,11 +166,11 @@ async fn print_exprkind<'a>(
.await]),
ExprKind::Const(c) => format!("{c}").into(),
ExprKind::Lambda(None, body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
.unbounded("\\.{0l}")
// .unbounded("\\.{0l}")
.bounded("(\\.{0b})")))
.units([print_expr(body, c, visited).await]),
ExprKind::Lambda(Some(path), body) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
.unbounded("\\{0b}. {1l}")
// .unbounded("\\{0b}. {1l}")
.bounded("(\\{0b}. {1b})")))
.units([format!("{path}").into(), print_expr(body, c, visited).await]),
ExprKind::Seq(l, r) =>
@@ -361,14 +347,14 @@ impl TokenVariant<api::ExprTicket> for Expr {
pub struct ExprWillPanic;

impl TokenVariant<api::Expression> for Expr {
type FromApiCtx<'a> = ExprParseCtx<'a>;
type FromApiCtx<'a> = Ctx;
async fn from_api(
api: &api::Expression,
ctx: &mut Self::FromApiCtx<'_>,
_: SrcRange,
_: &Interner,
) -> Self {
Self::from_api(api, PathSetBuilder::new(), ctx).await
Self::from_api(api, PathSetBuilder::new(), ctx.clone()).await
}
type ToApiCtx<'a> = ExprWillPanic;
async fn into_api(self, ExprWillPanic: &mut Self::ToApiCtx<'_>) -> api::Expression {

@@ -13,7 +13,6 @@ use crate::expr::Expr;
pub struct ExprStoreData {
exprs: RefCell<HashMap<api::ExprTicket, (u32, Expr)>>,
parent: Option<ExprStore>,
tracking_parent: bool,
}
#[derive(Clone, Default)]
pub struct ExprStore(Rc<ExprStoreData>);
@@ -25,16 +24,12 @@ impl ExprStore {
/// but operations on the parent can access the child exprs too until this
/// store is dropped.
#[must_use]
pub fn derive(&self, tracking_parent: bool) -> Self {
Self(Rc::new(ExprStoreData {
exprs: RefCell::default(),
parent: Some(self.clone()),
tracking_parent,
}))
pub fn derive(&self) -> Self {
Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) }))
}
pub fn give_expr(&self, expr: Expr) {
if self.0.tracking_parent {
self.0.parent.as_ref().unwrap().give_expr(expr.clone());
if let Some(parent) = self.0.parent.as_ref() {
parent.give_expr(expr.clone())
}
match self.0.exprs.borrow_mut().entry(expr.id()) {
Entry::Occupied(mut oe) => oe.get_mut().0 += 1,
@@ -44,8 +39,8 @@ impl ExprStore {
}
}
pub fn take_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
if self.0.tracking_parent {
self.0.parent.as_ref().unwrap().take_expr(ticket);
if let Some(parent) = self.0.parent.as_ref() {
parent.take_expr(ticket);
}
match self.0.exprs.borrow_mut().entry(ticket) {
Entry::Vacant(_) => panic!("Attempted to double-take expression"),
@@ -79,13 +74,11 @@ impl Drop for ExprStore {
if 1 < Rc::strong_count(&self.0) {
return;
}
if !self.0.tracking_parent {
return;
}
let parent = self.0.parent.as_ref().unwrap();
for (id, (count, _)) in self.0.exprs.borrow().iter() {
for _ in 0..*count {
parent.take_expr(*id);
if let Some(parent) = self.0.parent.as_ref() {
for (id, (count, _)) in self.0.exprs.borrow().iter() {
for _ in 0..*count {
parent.take_expr(*id);
}
}
}
}

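// Sketch (not part of the commit) of the new unconditional parent mirroring in
// ExprStore::derive: gives and takes on the child are forwarded to the parent, and
// the Drop impl returns any remaining child references. `scoped` and the `ctx`
// parameter are illustrative only.
fn scoped(ctx: &Ctx, expr: Expr) {
  let child = ctx.exprs.derive();
  child.give_expr(expr.clone()); // also recorded in ctx.exprs
  assert!(ctx.exprs.get_expr(expr.id()).is_some());
  child.take_expr(expr.id()); // removal is mirrored as well
}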
@@ -28,8 +28,7 @@ use crate::api;
use crate::atom::AtomHand;
use crate::ctx::Ctx;
use crate::dealias::{ChildError, ChildErrorKind, walk};
use crate::expr::ExprKind;
use crate::expr_store::ExprStore;
use crate::expr::{Expr, PathSetBuilder};
use crate::system::SystemCtor;
use crate::tree::MemberKind;

@@ -47,7 +46,6 @@ pub struct ExtensionData {
systems: Vec<SystemCtor>,
logger: Logger,
next_pars: RefCell<NonZeroU64>,
exprs: ExprStore,
exiting_snd: Sender<()>,
lex_recur: Mutex<HashMap<api::ParsId, Sender<ReqPair<api::SubLex>>>>,
}
@@ -92,7 +90,6 @@ impl Extension {
ExtensionData {
name: init.name.clone(),
exiting_snd,
exprs: ctx.common_exprs.derive(false),
ctx: ctx.clone(),
systems: (init.systems.iter().cloned())
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
@@ -111,25 +108,15 @@ impl Extension {
}
match notif {
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => {
let target = this.0.exprs.get_expr(acq.1).expect("Invalid ticket");
this.0.exprs.give_expr(target)
let target = this.0.ctx.exprs.get_expr(acq.1).expect("Invalid ticket");
this.0.ctx.exprs.give_expr(target)
}
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => {
if this.is_own_sys(rel.0).await {
this.0.exprs.take_expr(rel.1);
this.0.ctx.exprs.take_expr(rel.1);
} else {
writeln!(this.reqnot().logger(), "Not our system {:?}", rel.0)
}
}
api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => {
if !this.is_own_sys(mov.dec).await {
writeln!(this.reqnot().logger(), "Not our system {:?}", mov.dec);
return;
}
let recp = this.ctx().system_inst(mov.inc).await.expect("invallid recipient sys id");
let expr = this.0.exprs.get_expr(mov.expr).expect("invalid ticket");
recp.ext().0.exprs.give_expr(expr);
this.0.exprs.take_expr(mov.expr);
},
api::ExtHostNotif::Log(api::Log(str)) => this.logger().log(str),
}
@@ -180,17 +167,23 @@ impl Extension {
|
||||
}
|
||||
hand.handle(&sl, &rep_out.next().await.unwrap()).await
|
||||
},
|
||||
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(
|
||||
ins @ api::Inspect { target },
|
||||
)) => {
|
||||
let expr = this.exprs().get_expr(target).expect("Invalid ticket");
|
||||
hand
|
||||
.handle(&ins, &api::Inspected {
|
||||
refcount: expr.strong_count() as u32,
|
||||
location: expr.pos().to_api(),
|
||||
kind: expr.to_api().await,
|
||||
})
|
||||
.await
|
||||
api::ExtHostReq::ExprReq(expr_req) => match expr_req {
|
||||
api::ExprReq::Inspect(ins @ api::Inspect { target }) => {
|
||||
let expr = ctx.exprs.get_expr(target).expect("Invalid ticket");
|
||||
hand
|
||||
.handle(&ins, &api::Inspected {
|
||||
refcount: expr.strong_count() as u32,
|
||||
location: expr.pos().to_api(),
|
||||
kind: expr.to_api().await,
|
||||
})
|
||||
.await
|
||||
},
|
||||
api::ExprReq::Create(ref cre @ api::Create(ref expr)) => {
|
||||
let expr = Expr::from_api(expr, PathSetBuilder::new(), ctx.clone()).await;
|
||||
let expr_id = expr.id();
|
||||
ctx.exprs.give_expr(expr);
|
||||
hand.handle(cre, &expr_id).await
|
||||
},
|
||||
},
|
||||
api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => {
|
||||
let reply: <api::LsModule as Request>::Response = 'reply: {
|
||||
@@ -249,13 +242,6 @@ impl Extension {
|
||||
let unit = atom.print(&FmtCtxImpl { i: &this.ctx().i }).await;
|
||||
hand.handle(eap, &unit.to_api()).await
|
||||
},
|
||||
api::ExtHostReq::CreateAtom(ref create @ api::CreateAtom(ref atom, target)) => {
|
||||
let atom = AtomHand::from_api(atom, Pos::None, &mut ctx.clone()).await;
|
||||
let target = ctx.system_inst(target).await.expect("Invalid recipient for atom");
|
||||
let expr = ExprKind::Atom(atom).at(Pos::None);
|
||||
target.ext().exprs().give_expr(expr.clone());
|
||||
hand.handle(create, &expr.id()).await
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -273,8 +259,6 @@ impl Extension {
|
||||
pub fn logger(&self) -> &Logger { &self.0.logger }
|
||||
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
|
||||
#[must_use]
|
||||
pub fn exprs(&self) -> &ExprStore { &self.0.exprs }
|
||||
#[must_use]
|
||||
pub async fn is_own_sys(&self, id: api::SysId) -> bool {
|
||||
let Some(sys) = self.ctx().system_inst(id).await else {
|
||||
writeln!(self.logger(), "Invalid system ID {id:?}");
|
||||
|
||||
@@ -13,7 +13,7 @@ use orchid_base::tree::recur;
|
||||
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprParseCtx};
|
||||
use crate::expr::Expr;
|
||||
use crate::expr_store::ExprStore;
|
||||
use crate::parsed::{ParsTok, ParsTokTree, tt_to_api};
|
||||
use crate::system::System;
|
||||
@@ -60,14 +60,7 @@ impl<'a> LexCtx<'a> {
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn des_subtree(&mut self, tree: &api::TokenTree, exprs: ExprStore) -> ParsTokTree {
|
||||
ParsTokTree::from_api(
|
||||
tree,
|
||||
&mut { exprs },
|
||||
&mut ExprParseCtx { ctx: self.ctx, exprs: &self.ctx.common_exprs },
|
||||
self.path,
|
||||
&self.ctx.i,
|
||||
)
|
||||
.await
|
||||
ParsTokTree::from_api(tree, &mut { exprs }, &mut self.ctx.clone(), self.path, &self.ctx.i).await
|
||||
}
|
||||
#[must_use]
|
||||
pub fn strip_char(&mut self, tgt: char) -> bool {
|
||||
@@ -146,9 +139,9 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
let mut errors = Vec::new();
|
||||
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
|
||||
let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone());
|
||||
let temp_store = ctx.ctx.exprs.derive();
|
||||
let ctx_lck = &Mutex::new(&mut *ctx);
|
||||
let errors_lck = &Mutex::new(&mut errors);
|
||||
let temp_store = sys.ext().exprs().derive(true);
|
||||
let temp_store_cb = temp_store.clone();
|
||||
let lx = sys
|
||||
.lex(source, path, pos, |pos| {
|
||||
|
||||
@@ -185,7 +185,7 @@ impl Tree for ParsedModule {
|
||||
impl Format for ParsedModule {
|
||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
let head_str = format!("export ::({})\n", self.exports.iter().join(", "));
|
||||
Variants::sequence(self.items.len() + 1, "\n", None).units(
|
||||
Variants::default().sequence(self.items.len() + 1, "", "\n", "", None).units_own(
|
||||
[head_str.into()].into_iter().chain(join_all(self.items.iter().map(|i| i.print(c))).await),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@ use orchid_base::tree::ttv_from_api;
|
||||
use substack::Substack;
|
||||
|
||||
use crate::api;
|
||||
use crate::expr::ExprParseCtx;
|
||||
use crate::expr_store::ExprStore;
|
||||
use crate::parse::HostParseCtx;
|
||||
use crate::parsed::{
|
||||
@@ -35,7 +34,7 @@ impl Parser {
|
||||
comments: Vec<Comment>,
|
||||
callback: &mut impl AsyncFnMut(ModPath<'_>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
let mut temp_store = self.system.ext().exprs().derive(true);
|
||||
let mut temp_store = self.system.ctx().exprs.derive();
|
||||
let src_path = line.first().expect("cannot be empty").sr.path();
|
||||
let line =
|
||||
join_all((line.into_iter()).map(|t| async { tt_to_api(&mut temp_store.clone(), t).await }))
|
||||
@@ -57,7 +56,6 @@ impl Parser {
|
||||
i: self.system.i(),
|
||||
mod_path: &mod_path,
|
||||
ext_exprs: &mut temp_store,
|
||||
pctx: &mut ExprParseCtx { ctx: self.system.ctx(), exprs: self.system.ext().exprs() },
|
||||
src_path: &src_path,
|
||||
sys: &self.system,
|
||||
})
|
||||
@@ -73,7 +71,6 @@ struct ConvCtx<'a> {
|
||||
src_path: &'a Sym,
|
||||
i: &'a Interner,
|
||||
ext_exprs: &'a mut ExprStore,
|
||||
pctx: &'a mut ExprParseCtx<'a>,
|
||||
}
|
||||
async fn conv(
|
||||
parsed_v: Vec<api::ParsedLine>,
|
||||
@@ -87,7 +84,8 @@ async fn conv(
|
||||
api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) =>
|
||||
(name, exported, kind),
|
||||
api::ParsedLineKind::Recursive(rec) => {
|
||||
let tokens = ttv_from_api(rec, ctx.ext_exprs, ctx.pctx, ctx.src_path, ctx.i).await;
|
||||
let tokens =
|
||||
ttv_from_api(rec, ctx.ext_exprs, &mut ctx.sys.ctx().clone(), ctx.src_path, ctx.i).await;
|
||||
items.extend(callback(module.clone(), tokens).await?);
|
||||
continue;
|
||||
},
|
||||
|
||||
@@ -22,7 +22,7 @@ use orchid_base::reqnot::Requester;
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::dealias::{ChildErrorKind, Tree, absolute_path, resolv_glob, walk};
|
||||
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
|
||||
use crate::expr::{Expr, PathSetBuilder};
|
||||
use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule};
|
||||
use crate::system::System;
|
||||
|
||||
@@ -90,8 +90,7 @@ impl Root {
|
||||
for (path, (sys_id, pc_id)) in deferred_consts {
|
||||
let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
|
||||
let api_expr = sys.reqnot().request(api::FetchParsedConst(sys.id(), pc_id)).await;
|
||||
let mut xp_ctx = ExprParseCtx { ctx: &this.ctx, exprs: sys.ext().exprs() };
|
||||
let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), &mut xp_ctx).await;
|
||||
let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), this.ctx.clone()).await;
|
||||
new.0.write().await.consts.insert(path, expr);
|
||||
}
|
||||
new
|
||||
@@ -178,8 +177,7 @@ impl Module {
|
||||
api::MemberKind::Lazy(id) =>
|
||||
(Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None),
|
||||
api::MemberKind::Const(val) => {
|
||||
let mut expr_ctx = ExprParseCtx { ctx: ctx.sys.ctx(), exprs: ctx.sys.ext().exprs() };
|
||||
let expr = Expr::from_api(&val, PathSetBuilder::new(), &mut expr_ctx).await;
|
||||
let expr = Expr::from_api(&val, PathSetBuilder::new(), ctx.sys.ctx().clone()).await;
|
||||
ctx.consts.insert(name.clone(), expr);
|
||||
(None, Some(MemberKind::Const))
|
||||
},
|
||||
@@ -463,8 +461,7 @@ impl LazyMemberHandle {
|
||||
let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member");
|
||||
match sys.get_tree(self.id).await {
|
||||
api::MemberKind::Const(c) => {
|
||||
let mut pctx = ExprParseCtx { ctx: &ctx, exprs: sys.ext().exprs() };
|
||||
let expr = Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await;
|
||||
let expr = Expr::from_api(&c, PathSetBuilder::new(), ctx.clone()).await;
|
||||
let (.., path) = self.destructure();
|
||||
consts.insert(path, expr);
|
||||
MemberKind::Const
|
||||
|
||||
@@ -19,7 +19,9 @@ orchid-extension = { version = "0.1.0", path = "../orchid-extension", features =
|
||||
"tokio",
|
||||
] }
|
||||
ordered-float = "5.0.0"
|
||||
pastey = "0.1.1"
|
||||
rust_decimal = "1.38.0"
|
||||
subslice-offset = "0.1.1"
|
||||
substack = "1.1.1"
|
||||
tokio = { version = "1.47.1", features = ["full"] }
|
||||
|
||||
|
||||
@@ -2,8 +2,11 @@ mod macros;
|
||||
mod std;
|
||||
|
||||
pub use std::number::num_atom::{Float, HomoArray, Int, Num};
|
||||
pub use std::option::OrcOpt;
|
||||
pub use std::reflection::sym_atom::{SymAtom, sym_expr};
|
||||
pub use std::std_system::StdSystem;
|
||||
pub use std::string::str_atom::OrcString;
|
||||
pub use std::tuple::{HomoTpl, Tpl, Tuple, UntypedTuple};
|
||||
|
||||
pub use macros::macro_system::MacroSystem;
|
||||
pub use macros::mactree::{MacTok, MacTree};
|
||||
|
||||
@@ -4,12 +4,13 @@ use never::Never;
|
||||
use orchid_base::format::fmt;
|
||||
use orchid_extension::atom::{Atomic, TAtom};
|
||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::conv::ToExpr;
|
||||
use orchid_extension::coroutine_exec::exec;
|
||||
use orchid_extension::expr::Expr;
|
||||
use orchid_extension::gen_expr::GExpr;
|
||||
|
||||
use crate::macros::mactree::{MacTok, MacTree, map_mactree};
|
||||
use crate::macros::mactree::{MacTok, MacTree};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct InstantiateTplCall {
|
||||
@@ -35,24 +36,24 @@ impl OwnedAtom for InstantiateTplCall {
|
||||
self.clone().call(arg).await
|
||||
}
|
||||
async fn call(mut self, arg: Expr) -> GExpr {
|
||||
exec("macros::instantiate_tpl", async move |mut h| {
|
||||
exec(async move |mut h| {
|
||||
match h.exec::<TAtom<MacTree>>(arg.clone()).await {
|
||||
Err(_) => panic!("Expected a macro param, found {}", fmt(&arg, arg.ctx().i()).await),
|
||||
Ok(t) => self.argv.push(own(t).await),
|
||||
Err(_) => panic!("Expected a macro param, found {}", fmt(&arg, &i()).await),
|
||||
Ok(t) => self.argv.push(own(&t).await),
|
||||
};
|
||||
if self.argv.len() < self.argc {
|
||||
return self.to_expr().await;
|
||||
return self.to_gen().await;
|
||||
}
|
||||
let mut args = self.argv.into_iter();
|
||||
let ret = map_mactree(&self.tpl, &mut false, &mut |mt| match mt.tok() {
|
||||
let ret = self.tpl.map(&mut false, &mut |mt| match mt.tok() {
|
||||
MacTok::Slot => Some(args.next().expect("Not enough arguments to fill all slots")),
|
||||
_ => None,
|
||||
});
|
||||
assert!(args.next().is_none(), "Too many arguments for all slots");
|
||||
ret.to_expr().await
|
||||
ret.to_gen().await
|
||||
})
|
||||
.await
|
||||
.to_expr()
|
||||
.to_gen()
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,11 +11,12 @@ use orchid_base::parse::{
|
||||
use orchid_base::sym;
|
||||
use orchid_base::tree::Paren;
|
||||
use orchid_extension::atom::TAtom;
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::conv::TryFromExpr;
|
||||
use orchid_extension::gen_expr::{atom, call, sym_ref};
|
||||
use orchid_extension::parser::{ConstCtx, PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, Parser};
|
||||
|
||||
use crate::macros::mactree::{MacTok, MacTree, glossary_v, map_mactree_v};
|
||||
use crate::macros::mactree::{MacTok, MacTree, MacTreeSeq};
|
||||
use crate::macros::ph_lexer::PhAtom;
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -40,21 +41,18 @@ impl Parser for LetLine {
|
||||
let aliased = parse_tokv(tail, &ctx).await;
|
||||
Ok(vec![ParsedLine::cnst(&line.sr(), &comments, exported, name, async move |ctx| {
|
||||
let rep = Reporter::new();
|
||||
let dealiased = dealias_mac_v(aliased, &ctx, &rep).await;
|
||||
let macro_input = MacTok::S(Paren::Round, dealiased).at(sr.pos());
|
||||
let macro_input =
|
||||
MacTok::S(Paren::Round, dealias_mac_v(&aliased, &ctx, &rep).await).at(sr.pos());
|
||||
if let Some(e) = rep.errv() {
|
||||
return Err(e);
|
||||
}
|
||||
Ok(call(sym_ref(sym!(macros::lower; ctx.i()).await), [call(
|
||||
sym_ref(sym!(macros::resolve; ctx.i()).await),
|
||||
[atom(macro_input)],
|
||||
)]))
|
||||
Ok(call(sym_ref(sym!(macros::resolve; i())), [atom(macro_input)]))
|
||||
})])
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn dealias_mac_v(aliased: Vec<MacTree>, ctx: &ConstCtx, rep: &Reporter) -> Vec<MacTree> {
|
||||
let keys = glossary_v(&aliased).collect_vec();
|
||||
pub async fn dealias_mac_v(aliased: &MacTreeSeq, ctx: &ConstCtx, rep: &Reporter) -> MacTreeSeq {
|
||||
let keys = aliased.glossary().iter().cloned().collect_vec();
|
||||
let mut names: HashMap<_, _> = HashMap::new();
|
||||
let mut stream = pin!(ctx.names(&keys).zip(stream::iter(&keys)));
|
||||
while let Some((canonical, local)) = stream.next().await {
|
||||
@@ -65,13 +63,13 @@ pub async fn dealias_mac_v(aliased: Vec<MacTree>, ctx: &ConstCtx, rep: &Reporter
|
||||
},
|
||||
}
|
||||
}
|
||||
map_mactree_v(&aliased, &mut false, &mut |tree| match &*tree.tok {
|
||||
aliased.map(&mut false, &mut |tree| match &*tree.tok {
|
||||
MacTok::Name(n) => names.get(n).map(|new_n| MacTok::Name(new_n.clone()).at(tree.pos())),
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> Vec<MacTree> {
|
||||
pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> MacTreeSeq {
|
||||
if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) {
|
||||
let (head, lambda) = line.split_at(idx as u32);
|
||||
let (_, body) = lambda.pop_front().unwrap();
|
||||
@@ -86,14 +84,14 @@ pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> Vec<MacTree>
|
||||
.await,
|
||||
),
|
||||
};
|
||||
all
|
||||
MacTreeSeq::new(all)
|
||||
} else {
|
||||
parse_tokv_no_lambdas(&line, ctx).await
|
||||
MacTreeSeq::new(parse_tokv_no_lambdas(&line, ctx).await)
|
||||
}
|
||||
}
|
||||
|
||||
async fn parse_tokv_no_lambdas(line: &[PTokTree], ctx: &impl ParseCtx) -> Vec<MacTree> {
|
||||
stream::iter(line).filter_map(|tt| parse_tok(tt, ctx)).collect().await
|
||||
stream::iter(line).filter_map(|tt| parse_tok(tt, ctx)).collect::<Vec<_>>().await
|
||||
}
|
||||
|
||||
pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree> {
|
||||
@@ -115,7 +113,7 @@ pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree>
|
||||
},
|
||||
PTok::Handle(expr) => match TAtom::<PhAtom>::try_from_expr(expr.clone()).await {
|
||||
Err(_) => MacTok::Value(expr.clone()),
|
||||
Ok(ta) => MacTok::Ph(ta.value.to_full(ta.ctx()).await),
|
||||
Ok(ta) => MacTok::Ph(ta.value.to_full().await),
|
||||
},
|
||||
PTok::NewExpr(never) => match *never {},
|
||||
PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"),
|
||||
|
||||
@@ -1,97 +1,65 @@
|
||||
use hashbrown::HashMap;
|
||||
use itertools::{Itertools, chain};
|
||||
use orchid_base::error::Reporter;
|
||||
use orchid_base::{clone, sym};
|
||||
use orchid_base::sym;
|
||||
use orchid_extension::atom::TAtom;
|
||||
use orchid_extension::atom_owned::own;
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::conv::ToExpr;
|
||||
use orchid_extension::coroutine_exec::exec;
|
||||
use orchid_extension::func_atom::Lambda;
|
||||
use orchid_extension::gen_expr::{call, sym_ref};
|
||||
use orchid_extension::reflection::{ReflMemKind, refl};
|
||||
use orchid_extension::tree::{GenMember, MemKind, fun, lazy, prefix};
|
||||
use substack::Substack;
|
||||
use orchid_extension::tree::{GenMember, fun, prefix};
|
||||
|
||||
use crate::MacTok;
|
||||
use crate::macros::macro_value::{Macro, Matcher};
|
||||
use crate::macros::mactree::{LowerCtx, MacTree, Ph};
|
||||
use crate::macros::resolve::{ResolveCtx, resolve};
|
||||
use crate::macros::utils::{mactree, mactreev, mk_macro};
|
||||
use crate::macros::mactree::MacTree;
|
||||
use crate::macros::resolve::resolve;
|
||||
use crate::macros::utils::{build_macro, mactree, mactreev};
|
||||
|
||||
pub fn gen_macro_lib() -> Vec<GenMember> {
|
||||
pub async fn gen_macro_lib() -> Vec<GenMember> {
|
||||
prefix("macros", [
|
||||
fun(true, "lower", |tpl: TAtom<MacTree>| async move {
|
||||
let ctx = LowerCtx { sys: tpl.untyped.ctx().clone(), rep: &Reporter::new() };
|
||||
let res = own(tpl).await.lower(ctx, Substack::Bottom).await;
|
||||
if let Some(e) = Reporter::new().errv() { Err(e) } else { Ok(res) }
|
||||
}),
|
||||
fun(true, "recur", async |tpl: TAtom<MacTree>| {
|
||||
call(sym_ref(sym!(macros::lower; tpl.i()).await), [call(
|
||||
sym_ref(sym!(macros::resolve; tpl.i()).await),
|
||||
[tpl.to_expr().await],
|
||||
)])
|
||||
}),
|
||||
fun(true, "resolve", |tpl: TAtom<MacTree>| async move {
|
||||
exec("macros::resolve", async move |mut h| {
|
||||
let ctx = tpl.ctx().clone();
|
||||
let root = refl(&ctx);
|
||||
let tpl = own(tpl.clone()).await;
|
||||
let mut macros = HashMap::new();
|
||||
for n in tpl.glossary() {
|
||||
if let Ok(ReflMemKind::Const) = root.get_by_path(n).await.map(|m| m.kind()) {
|
||||
let Ok(mac) = h.exec::<TAtom<Macro>>(sym_ref(n.clone())).await else { continue };
|
||||
let mac = own(mac).await;
|
||||
macros.entry(mac.canonical_name(&ctx).await).or_insert(mac);
|
||||
}
|
||||
}
|
||||
let mut named = HashMap::new();
|
||||
let mut priod = Vec::new();
|
||||
for (_, mac) in macros.iter() {
|
||||
for rule in mac.0.rules.iter() {
|
||||
if rule.glossary.is_subset(tpl.glossary()) {
|
||||
match &rule.pattern {
|
||||
Matcher::Named(m) =>
|
||||
named.entry(m.head()).or_insert(Vec::new()).push((m, mac, rule)),
|
||||
Matcher::Priod(p) => priod.push((mac.0.prio, (p, mac, rule))),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let priod = priod.into_iter().sorted_unstable_by_key(|(p, _)| *p).map(|(_, r)| r).collect();
|
||||
let mut rctx = ResolveCtx { h, ctx: ctx.clone(), named, priod };
|
||||
let resolve_res = resolve(&mut rctx, &tpl).await;
|
||||
std::mem::drop(rctx);
|
||||
match resolve_res {
|
||||
Some(out_tree) => out_tree.to_expr().await,
|
||||
None => tpl.to_expr().await,
|
||||
}
|
||||
})
|
||||
.await
|
||||
}),
|
||||
fun(true, "resolve", async |tpl: TAtom<MacTree>| resolve(own(&tpl).await).await),
|
||||
// TODO test whether any of this worked
|
||||
lazy(true, "common", async |_, ctx| {
|
||||
let add_macro = {
|
||||
clone!(ctx);
|
||||
mk_macro(Some(1), ["+"], [(
|
||||
mactreev!(ctx.i(); "...$" lhs 0 macros::common::+ "...$" rhs 1),
|
||||
Lambda::new("std::number::add", async move |lhs: TAtom<MacTree>, rhs: TAtom<MacTree>| {
|
||||
mactree!(ctx.i(); std::number::add
|
||||
(macros::recur "'" lhs.ex();)
|
||||
(macros::recur "'" rhs.ex();)
|
||||
)
|
||||
}),
|
||||
)])
|
||||
};
|
||||
let mul_macro = mk_macro(Some(2), ["*"], [(
|
||||
mactreev!(ctx.i(); "...$" lhs 0 macros::common::* "...$" rhs 1),
|
||||
Lambda::new("std::number::mul", async |lhs: TAtom<MacTree>, rhs: TAtom<MacTree>| {
|
||||
mactree!(lhs.ctx().i(); std::number::mul
|
||||
(macros::recur "'" lhs.ex();)
|
||||
(macros::recur "'" rhs.ex();)
|
||||
)
|
||||
}),
|
||||
)]);
|
||||
MemKind::Mod { members: chain!(add_macro, mul_macro).collect_vec() }
|
||||
}),
|
||||
prefix("common", [
|
||||
build_macro(None, ["..", "_"]).finish(),
|
||||
build_macro(Some(1), ["+"])
|
||||
.rule(mactreev!("...$" lhs 0 macros::common::+ "...$" rhs 1), [async |[lhs, rhs]| {
|
||||
call(sym_ref(sym!(std::number::add; i())), [resolve(lhs).await, resolve(rhs).await])
|
||||
}])
|
||||
.finish(),
|
||||
build_macro(Some(2), ["*"])
|
||||
.rule(mactreev!("...$" lhs 0 macros::common::* "...$" rhs 1), [async |[lhs, rhs]| {
|
||||
call(sym_ref(sym!(std::number::mul; i())), [resolve(lhs).await, resolve(rhs).await])
|
||||
}])
|
||||
.finish(),
|
||||
build_macro(None, ["comma_list", ","])
|
||||
.rule(
|
||||
mactreev!(macros::common::comma_list ( "...$" head 0 macros::common::, "...$" tail 1)),
|
||||
[async |[head, tail]| {
|
||||
call(sym_ref(sym!(std::tuple::cat; i())), [
|
||||
call(sym_ref(sym!(std::tuple::one; i())), [head.to_gen().await]),
|
||||
resolve(mactree!(macros::common::comma_list "push" tail ;)).await,
|
||||
])
|
||||
}],
|
||||
)
|
||||
.rule(mactreev!(macros::common::comma_list ( "...$" final_tail 0 )), [async |[tail]| {
|
||||
call(sym_ref(sym!(std::tuple::one; i())), [tail.to_gen().await])
|
||||
}])
|
||||
.rule(mactreev!(macros::common::comma_list()), [async |[]| {
|
||||
sym_ref(sym!(std::tuple::empty; i()))
|
||||
}])
|
||||
.finish(),
|
||||
build_macro(None, ["semi_list", ";"])
|
||||
.rule(
|
||||
mactreev!(macros::common::semi_list ( "...$" head 0 macros::common::; "...$" tail 1)),
|
||||
[async |[head, tail]| {
|
||||
call(sym_ref(sym!(std::tuple::cat; i())), [
|
||||
call(sym_ref(sym!(std::tuple::one; i())), [resolve(head).await]),
|
||||
resolve(mactree!(macros::common::semi_list "push" tail ;)).await,
|
||||
])
|
||||
}],
|
||||
)
|
||||
.rule(mactreev!(macros::common::semi_list ( "...$" final_tail 0 )), [async |[tail]| {
|
||||
call(sym_ref(sym!(std::tuple::one; i())), [resolve(tail).await])
|
||||
}])
|
||||
.rule(mactreev!(macros::common::semi_list()), [async |[]| {
|
||||
sym_ref(sym!(std::tuple::empty; i()))
|
||||
}])
|
||||
.finish(),
|
||||
]),
|
||||
])
|
||||
}
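// Rough shape of the builder API used above, as far as this diff shows it:
// build_macro(priority, names) starts a macro, each .rule(pattern, [handler])
// pairs a mactreev! pattern with async closures that receive the captured
// placeholders in order, and .finish() yields the members expected by the
// enclosing prefix(..) list.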
|
||||
|
||||
@@ -12,14 +12,15 @@ use orchid_base::parse::{
|
||||
use orchid_base::tree::{Paren, Token};
|
||||
use orchid_base::{clone, sym};
|
||||
use orchid_extension::atom::TAtom;
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::conv::{ToExpr, TryFromExpr};
|
||||
use orchid_extension::gen_expr::{atom, call, sym_ref};
|
||||
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
|
||||
|
||||
use crate::macros::let_line::{dealias_mac_v, parse_tokv};
|
||||
use crate::macros::macro_value::{Macro, MacroData, Matcher, Rule};
|
||||
use crate::macros::mactree::{glossary_v, map_mactree_v};
|
||||
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
|
||||
use crate::macros::macro_value::{Macro, MacroData, Rule};
|
||||
use crate::macros::mactree::MacTreeSeq;
|
||||
use crate::macros::rule::matcher::Matcher;
|
||||
use crate::{Int, MacTok};
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -114,7 +115,7 @@ impl Parser for MacroLine {
|
||||
};
|
||||
let pattern = parse_tokv(pattern, &ctx).await;
|
||||
let mut placeholders = Vec::new();
|
||||
map_mactree_v(&pattern, &mut false, &mut |tok| {
|
||||
pattern.map(&mut false, &mut |tok| {
|
||||
if let MacTok::Ph(ph) = tok.tok() {
|
||||
placeholders.push((ph.clone(), tok.pos()))
|
||||
}
|
||||
@@ -123,23 +124,21 @@ impl Parser for MacroLine {
|
||||
let mut body_mactree = parse_tokv(body, &ctx).await;
|
||||
for (ph, ph_pos) in placeholders.iter().rev() {
|
||||
let name = ctx.module().suffix([ph.name.clone()], ctx.i()).await;
|
||||
body_mactree = vec![
|
||||
MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone()),
|
||||
]
|
||||
body_mactree =
|
||||
MacTreeSeq::new([
|
||||
MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone())
|
||||
])
|
||||
}
|
||||
let body_sr = body.sr();
|
||||
rules.push((name.clone(), placeholders, pattern));
|
||||
lines.push(ParsedLine::cnst(&sr, &line.output, true, name, async move |ctx| {
|
||||
let rep = Reporter::new();
|
||||
let body = dealias_mac_v(body_mactree, &ctx, &rep).await;
|
||||
let body = dealias_mac_v(&body_mactree, &ctx, &rep).await;
|
||||
let macro_input = MacTok::S(Paren::Round, body).at(body_sr.pos());
|
||||
if let Some(e) = rep.errv() {
|
||||
return Err(e);
|
||||
}
|
||||
Ok(call(sym_ref(sym!(macros::lower; ctx.i()).await), [call(
|
||||
sym_ref(sym!(macros::resolve; ctx.i()).await),
|
||||
[macro_input.to_expr().await],
|
||||
)]))
|
||||
Ok(call(sym_ref(sym!(macros::resolve; i())), [macro_input.to_gen().await]))
|
||||
}))
|
||||
}
|
||||
let mac_cell = Rc::new(OnceCell::new());
|
||||
@@ -152,20 +151,15 @@ impl Parser for MacroLine {
|
||||
let rep = Reporter::new();
|
||||
let rules = rules.borrow_mut().take().expect("once cell initializer runs");
|
||||
let rules = stream::iter(rules)
|
||||
.then(|(body_name, placeholders, pattern_macv)| {
|
||||
.then(|(body_name, placeholders, pattern_rel)| {
|
||||
let cctx = &cctx;
|
||||
let rep = &rep;
|
||||
let prio = &prio;
|
||||
async move {
|
||||
let pattern_abs = dealias_mac_v(pattern_macv, cctx, rep).await;
|
||||
let glossary = glossary_v(&pattern_abs).collect();
|
||||
let pattern_res = match prio {
|
||||
None => NamedMatcher::new(&pattern_abs, cctx.i()).await.map(Matcher::Named),
|
||||
Some(_) => PriodMatcher::new(&pattern_abs, cctx.i()).await.map(Matcher::Priod),
|
||||
};
|
||||
let pattern = dealias_mac_v(&pattern_rel, cctx, rep).await;
|
||||
let pattern_res = Matcher::new(pattern.clone()).await;
|
||||
let placeholders = placeholders.into_iter().map(|(ph, _)| ph.name).collect_vec();
|
||||
match pattern_res {
|
||||
Ok(pattern) => Some(Rule { body_name, pattern, glossary, placeholders }),
|
||||
Ok(matcher) => Some(Rule { body_name, matcher, pattern, placeholders }),
|
||||
Err(e) => {
|
||||
rep.report(e);
|
||||
None
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
use orchid_base::interner::Interner;
|
||||
use never::Never;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::reqnot::Receipt;
|
||||
use orchid_base::sym;
|
||||
use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::entrypoint::ExtReq;
|
||||
use orchid_extension::lexer::LexerObj;
|
||||
use orchid_extension::other_system::SystemHandle;
|
||||
use orchid_extension::parser::ParserObj;
|
||||
use orchid_extension::system::{System, SystemCard};
|
||||
use orchid_extension::system_ctor::SystemCtor;
|
||||
use orchid_extension::tree::GenMember;
|
||||
use orchid_extension::tree::{GenMember, merge_trivial};
|
||||
|
||||
use crate::macros::instantiate_tpl::InstantiateTplCall;
|
||||
use crate::macros::let_line::LetLine;
|
||||
@@ -17,8 +18,10 @@ use crate::macros::macro_lib::gen_macro_lib;
|
||||
use crate::macros::macro_line::MacroLine;
|
||||
use crate::macros::macro_value::Macro;
|
||||
use crate::macros::mactree_lexer::MacTreeLexer;
|
||||
use crate::macros::match_macros::gen_match_macro_lib;
|
||||
use crate::macros::ph_lexer::{PhAtom, PhLexer};
|
||||
use crate::macros::requests::MacroReq;
|
||||
use crate::macros::std_macros::gen_std_macro_lib;
|
||||
use crate::macros::utils::MacroBodyArgCollector;
|
||||
use crate::{MacTree, StdSystem};
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -32,26 +35,30 @@ impl SystemCtor for MacroSystem {
|
||||
}
|
||||
impl SystemCard for MacroSystem {
|
||||
type Ctor = Self;
|
||||
type Req = MacroReq;
|
||||
type Req = Never;
|
||||
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> {
|
||||
[
|
||||
Some(InstantiateTplCall::dynfo()),
|
||||
Some(MacTree::dynfo()),
|
||||
Some(Macro::dynfo()),
|
||||
Some(PhAtom::dynfo()),
|
||||
Some(MacroBodyArgCollector::dynfo()),
|
||||
]
|
||||
}
|
||||
}
|
||||
impl System for MacroSystem {
|
||||
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { todo!("Handle {req:?}") }
|
||||
async fn prelude(i: &Interner) -> Vec<Sym> {
|
||||
async fn request(_: ExtReq<'_>, req: Never) -> Receipt<'_> { match req {} }
|
||||
async fn prelude() -> Vec<Sym> {
|
||||
vec![
|
||||
sym!(macros::resolve; i).await,
|
||||
sym!(macros::common::+; i).await,
|
||||
sym!(macros::common::*; i).await,
|
||||
sym!(macros::common::+; i()),
|
||||
sym!(macros::common::*; i()),
|
||||
sym!(macros::common::,; i()),
|
||||
sym!(std::tuple::t; i()),
|
||||
]
|
||||
}
|
||||
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer, &PhLexer] }
|
||||
fn parsers() -> Vec<ParserObj> { vec![&LetLine, &MacroLine] }
|
||||
fn env() -> Vec<GenMember> { gen_macro_lib() }
|
||||
async fn env() -> Vec<GenMember> {
|
||||
merge_trivial([gen_macro_lib().await, gen_std_macro_lib().await, gen_match_macro_lib().await])
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
use std::borrow::Cow;
|
||||
use std::rc::Rc;
|
||||
|
||||
use hashbrown::HashSet;
|
||||
use never::Never;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_extension::atom::Atomic;
|
||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
|
||||
use orchid_extension::system::SysCtx;
|
||||
use orchid_extension::context::i;
|
||||
|
||||
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
|
||||
use crate::macros::mactree::MacTreeSeq;
|
||||
use crate::macros::rule::matcher::Matcher;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MacroData {
|
||||
@@ -21,23 +21,18 @@ pub struct MacroData {
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Macro(pub Rc<MacroData>);
|
||||
impl Macro {
|
||||
pub async fn canonical_name(&self, ctx: &SysCtx) -> Sym {
|
||||
self.0.module.suffix([self.0.rules[0].body_name.clone()], ctx.i()).await
|
||||
pub async fn canonical_name(&self) -> Sym {
|
||||
self.0.module.suffix([self.0.rules[0].body_name.clone()], &i()).await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Rule {
|
||||
pub pattern: Matcher,
|
||||
pub glossary: HashSet<Sym>,
|
||||
pub pattern: MacTreeSeq,
|
||||
pub matcher: Matcher,
|
||||
pub placeholders: Vec<Tok<String>>,
|
||||
pub body_name: Tok<String>,
|
||||
}
|
||||
#[derive(Debug)]
|
||||
pub enum Matcher {
|
||||
Named(NamedMatcher),
|
||||
Priod(PriodMatcher),
|
||||
}
|
||||
impl Atomic for Macro {
|
||||
type Data = ();
|
||||
type Variant = OwnedVariant;
|
||||
|
||||
@@ -5,10 +5,9 @@ use std::rc::Rc;
|
||||
use futures::FutureExt;
|
||||
use futures::future::join_all;
|
||||
use hashbrown::HashSet;
|
||||
use itertools::Itertools;
|
||||
use orchid_api_derive::Coding;
|
||||
use orchid_base::error::{OrcErrv, Reporter, mk_errv};
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants, fmt};
|
||||
use orchid_base::error::OrcErrv;
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
@@ -16,16 +15,89 @@ use orchid_base::tl_cache;
|
||||
use orchid_base::tree::{Paren, indent};
|
||||
use orchid_extension::atom::Atomic;
|
||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
|
||||
use orchid_extension::conv::ToExpr;
|
||||
use orchid_extension::expr::Expr;
|
||||
use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref};
|
||||
use orchid_extension::system::SysCtx;
|
||||
use substack::Substack;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct LowerCtx<'a> {
|
||||
pub sys: SysCtx,
|
||||
pub rep: &'a Reporter,
|
||||
fn union_rc_sets(seq: impl IntoIterator<Item = Rc<HashSet<Sym>>>) -> Rc<HashSet<Sym>> {
|
||||
let mut acc = Rc::<HashSet<Sym>>::default();
|
||||
for right in seq {
|
||||
if acc.is_empty() {
|
||||
acc = right;
|
||||
continue;
|
||||
}
|
||||
if right.is_empty() {
|
||||
continue;
|
||||
}
|
||||
acc = match (Rc::try_unwrap(acc), Rc::try_unwrap(right)) {
|
||||
(Ok(mut left), Ok(right)) => {
|
||||
left.extend(right);
|
||||
Rc::new(left)
|
||||
},
|
||||
(Ok(mut owned), Err(borrowed)) | (Err(borrowed), Ok(mut owned)) => {
|
||||
owned.extend(borrowed.iter().cloned());
|
||||
Rc::new(owned)
|
||||
},
|
||||
(Err(left), Err(right)) => Rc::new(left.union(&right).cloned().collect()),
|
||||
}
|
||||
}
|
||||
acc
|
||||
}
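// Illustration (not part of the commit, hypothetical variable names): given
// `unique` (an Rc<HashSet<Sym>> with no other owners) and `shared` (one that
// still has other owners), union_rc_sets([unique, shared]) extends the
// uniquely-owned set in place via Rc::try_unwrap; a fresh allocation is only
// made in the (Err, Err) arm, when both inputs are still shared.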
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct MacTreeSeq {
|
||||
pub items: Rc<Vec<MacTree>>,
|
||||
pub top_glossary: Rc<HashSet<Sym>>,
|
||||
pub glossary: Rc<HashSet<Sym>>,
|
||||
}
|
||||
impl MacTreeSeq {
|
||||
pub fn new(i: impl IntoIterator<Item = MacTree>) -> Self {
|
||||
let mut items = Vec::new();
|
||||
let mut top_glossary = HashSet::new();
|
||||
let mut glossary = HashSet::new();
|
||||
for item in i {
|
||||
glossary.extend(item.glossary().iter().cloned());
|
||||
if let MacTok::Name(n) = item.tok() {
|
||||
top_glossary.insert(n.clone());
|
||||
}
|
||||
items.push(item);
|
||||
}
|
||||
Self { items: Rc::new(items), top_glossary: Rc::new(top_glossary), glossary: Rc::new(glossary) }
|
||||
}
|
||||
pub fn map<F: FnMut(MacTree) -> Option<MacTree>>(&self, changed: &mut bool, map: &mut F) -> Self {
|
||||
Self::new(self.items.iter().map(|tree| ro(changed, |changed| tree.map(changed, map))))
|
||||
}
|
||||
pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary }
|
||||
pub fn concat(self, other: Self) -> Self {
|
||||
if self.items.is_empty() {
|
||||
return other;
|
||||
} else if other.items.is_empty() {
|
||||
return self;
|
||||
}
|
||||
let items = match (Rc::try_unwrap(self.items), Rc::try_unwrap(other.items)) {
|
||||
(Ok(mut left), Ok(mut right)) => {
|
||||
left.append(&mut right);
|
||||
left
|
||||
},
|
||||
(Ok(mut left), Err(right)) => {
|
||||
left.extend_from_slice(&right[..]);
|
||||
left
|
||||
},
|
||||
(Err(left), Ok(mut right)) => {
|
||||
right.splice(0..0, left.iter().cloned());
|
||||
right
|
||||
},
|
||||
(Err(left), Err(right)) => left.iter().chain(&right[..]).cloned().collect(),
|
||||
};
|
||||
Self {
|
||||
items: Rc::new(items),
|
||||
top_glossary: union_rc_sets([self.top_glossary, other.top_glossary]),
|
||||
glossary: union_rc_sets([self.glossary, other.glossary]),
|
||||
}
|
||||
}
|
||||
}
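// Design note: MacTreeSeq caches `glossary` (and `top_glossary`, presumably
// for head-keyed matching) at construction time, so callers such as resolve()
// can filter rules with a cheap set check like
// `rule.pattern.glossary.is_subset(tpl.glossary())` instead of re-walking the
// trees for every candidate.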
|
||||
impl Format for MacTreeSeq {
|
||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
mtreev_fmt(&self.items[..], c).await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -38,66 +110,21 @@ impl MacTree {
|
||||
pub fn tok(&self) -> &MacTok { &self.tok }
|
||||
pub fn pos(&self) -> Pos { self.pos.clone() }
|
||||
pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary }
|
||||
pub async fn lower(&self, ctx: LowerCtx<'_>, args: Substack<'_, Sym>) -> GExpr {
|
||||
let expr = match self.tok() {
|
||||
MacTok::Bottom(e) => bot(e.clone()),
|
||||
MacTok::Lambda(arg, body) => {
|
||||
let MacTok::Name(name) = &*arg.tok else {
|
||||
return bot(mk_errv(
|
||||
ctx.sys.i().i("Syntax error after macros").await,
|
||||
"This token ends up as a binding, consider replacing it with a name",
|
||||
[arg.pos()],
|
||||
));
|
||||
};
|
||||
let arg_pos = args.len() as u64;
|
||||
let args = args.push(name.clone());
|
||||
let body = match &body[..] {
|
||||
[] => bot(mk_errv(
|
||||
ctx.sys.i().i("Empty lambda body").await,
|
||||
"Lambdas must evaluate to an expression",
|
||||
[self.pos()],
|
||||
)),
|
||||
[f, argv @ ..] => call(
|
||||
f.lower(ctx.clone(), args.clone()).boxed_local().await,
|
||||
lower_v(argv, ctx, args).await,
|
||||
),
|
||||
};
|
||||
lambda(arg_pos, body)
|
||||
pub fn map<F: FnMut(Self) -> Option<Self>>(&self, changed: &mut bool, map: &mut F) -> Self {
|
||||
let tok = match map(self.clone()) {
|
||||
Some(new_tok) => {
|
||||
*changed = true;
|
||||
return new_tok;
|
||||
},
|
||||
MacTok::Name(name) => match args.iter().enumerate().find(|(_, n)| *n == name) {
|
||||
None => sym_ref(name.clone()),
|
||||
Some((i, _)) => arg((args.len() - i - 1) as u64),
|
||||
None => match &*self.tok {
|
||||
MacTok::Lambda(arg, body) =>
|
||||
MacTok::Lambda(ro(changed, |changed| arg.map(changed, map)), body.map(changed, map)),
|
||||
MacTok::Name(_) | MacTok::Value(_) => return self.clone(),
|
||||
MacTok::Slot | MacTok::Ph(_) | MacTok::Bottom(_) => return self.clone(),
|
||||
MacTok::S(p, body) => MacTok::S(*p, body.map(changed, map)),
|
||||
},
|
||||
MacTok::Ph(ph) => {
|
||||
return bot(mk_errv(
|
||||
ctx.sys.i().i("Placeholder in value").await,
|
||||
format!("Placeholder {ph} is only supported in macro patterns"),
|
||||
[self.pos()],
|
||||
));
|
||||
},
|
||||
MacTok::S(Paren::Round, body) => match &body[..] {
|
||||
[fun, argv @ ..] => call(
|
||||
fun.lower(ctx.clone(), args.clone()).boxed_local().await,
|
||||
lower_v(argv, ctx, args).await,
|
||||
),
|
||||
[] =>
|
||||
return bot(mk_errv(
|
||||
ctx.sys.i().i("Empty ()").await,
|
||||
"Empty () is not a meaningful expression",
|
||||
[self.pos()],
|
||||
)),
|
||||
},
|
||||
MacTok::S(..) => {
|
||||
return bot(mk_errv(
|
||||
ctx.sys.i().i("[] or {} after macros").await,
|
||||
format!("{} didn't match any macro", fmt(self, ctx.sys.i()).await),
|
||||
[self.pos()],
|
||||
));
|
||||
},
|
||||
MacTok::Slot => panic!("Uninstantiated template should never be exposed"),
|
||||
MacTok::Value(v) => v.clone().to_expr().await,
|
||||
};
|
||||
expr.at(self.pos())
|
||||
if *changed { tok.at(self.pos()) } else { self.clone() }
|
||||
}
|
||||
}
|
||||
impl Atomic for MacTree {
|
||||
@@ -119,35 +146,31 @@ impl Format for MacTree {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn lower_v(v: &[MacTree], ctx: LowerCtx<'_>, args: Substack<'_, Sym>) -> Vec<GExpr> {
|
||||
join_all(v.iter().map(|t| t.lower(ctx.clone(), args.clone())).collect::<Vec<_>>()).await
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum MacTok {
|
||||
S(Paren, Vec<MacTree>),
|
||||
S(Paren, MacTreeSeq),
|
||||
Name(Sym),
|
||||
/// Only permitted in arguments to `instantiate_tpl`
|
||||
Slot,
|
||||
Value(Expr),
|
||||
Lambda(MacTree, Vec<MacTree>),
|
||||
Lambda(MacTree, MacTreeSeq),
|
||||
/// Only permitted in "pattern" values produced by macro blocks, which are
|
||||
/// never accessed as variables by usercode
|
||||
Ph(Ph),
|
||||
Bottom(OrcErrv),
|
||||
}
|
||||
impl MacTok {
|
||||
pub fn build_glossary(&self) -> HashSet<Sym> {
|
||||
pub fn build_glossary(&self) -> Rc<HashSet<Sym>> {
|
||||
match self {
|
||||
MacTok::Bottom(_) | MacTok::Ph(_) | MacTok::Slot | MacTok::Value(_) => HashSet::new(),
|
||||
MacTok::Name(sym) => HashSet::from([sym.clone()]),
|
||||
MacTok::S(_, body) => body.iter().flat_map(|mt| &*mt.glossary).cloned().collect(),
|
||||
MacTok::Bottom(_) | MacTok::Ph(_) | MacTok::Slot | MacTok::Value(_) => Rc::default(),
|
||||
MacTok::Name(sym) => Rc::new(HashSet::from([sym.clone()])),
|
||||
MacTok::S(_, body) => union_rc_sets(body.items.iter().map(|mt| mt.glossary.clone())),
|
||||
MacTok::Lambda(arg, body) =>
|
||||
body.iter().chain([arg]).flat_map(|mt| &*mt.glossary).cloned().collect(),
|
||||
union_rc_sets(body.items.iter().chain([arg]).map(|mt| mt.glossary.clone())),
|
||||
}
|
||||
}
|
||||
pub fn at(self, pos: impl Into<Pos>) -> MacTree {
|
||||
MacTree { pos: pos.into(), glossary: Rc::new(self.build_glossary()), tok: Rc::new(self) }
|
||||
MacTree { pos: pos.into(), glossary: self.build_glossary(), tok: Rc::new(self) }
|
||||
}
|
||||
}
|
||||
impl Format for MacTok {
|
||||
@@ -157,7 +180,7 @@ impl Format for MacTok {
|
||||
Self::Lambda(arg, b) => tl_cache!(Rc<Variants>: Rc::new(Variants::default()
|
||||
.unbounded("\\{0} {1l}")
|
||||
.bounded("(\\{0} {1b})")))
|
||||
.units([arg.print(c).boxed_local().await, mtreev_fmt(b, c).await]),
|
||||
.units([arg.print(c).boxed_local().await, b.print(c).await]),
|
||||
Self::Name(n) => format!("{n}").into(),
|
||||
Self::Ph(ph) => format!("{ph}").into(),
|
||||
Self::S(p, body) => match *p {
|
||||
@@ -165,7 +188,7 @@ impl Format for MacTok {
|
||||
Paren::Curly => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{{0b}}"))),
|
||||
Paren::Square => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("[{0b}]"))),
|
||||
}
|
||||
.units([mtreev_fmt(body, c).await]),
|
||||
.units([body.print(c).await]),
|
||||
Self::Slot => "$SLOT".into(),
|
||||
Self::Bottom(err) if err.len() == 1 => format!("Bottom({}) ", err.one().unwrap()).into(),
|
||||
Self::Bottom(err) => format!("Botttom(\n{}) ", indent(&err.to_string())).into(),
|
||||
@@ -177,7 +200,7 @@ pub async fn mtreev_fmt<'b>(
|
||||
v: impl IntoIterator<Item = &'b MacTree>,
|
||||
c: &(impl FmtCtx + ?Sized),
|
||||
) -> FmtUnit {
|
||||
FmtUnit::sequence(" ", None, join_all(v.into_iter().map(|t| t.print(c))).await)
|
||||
FmtUnit::sequence("", " ", "", None, join_all(v.into_iter().map(|t| t.print(c))).await)
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
@@ -203,36 +226,6 @@ pub enum PhKind {
|
||||
Vector { at_least_one: bool, priority: u8 },
|
||||
}
|
||||
|
||||
pub fn map_mactree<F: FnMut(MacTree) -> Option<MacTree>>(
|
||||
src: &MacTree,
|
||||
changed: &mut bool,
|
||||
map: &mut F,
|
||||
) -> MacTree {
|
||||
let tok = match map(src.clone()) {
|
||||
Some(new_tok) => {
|
||||
*changed = true;
|
||||
return new_tok;
|
||||
},
|
||||
None => match &*src.tok {
|
||||
MacTok::Lambda(arg, body) => MacTok::Lambda(
|
||||
ro(changed, |changed| map_mactree(arg, changed, map)),
|
||||
map_mactree_v(body, changed, map),
|
||||
),
|
||||
MacTok::Name(_) | MacTok::Value(_) => return src.clone(),
|
||||
MacTok::Slot | MacTok::Ph(_) | MacTok::Bottom(_) => return src.clone(),
|
||||
MacTok::S(p, body) => MacTok::S(*p, map_mactree_v(body, changed, map)),
|
||||
},
|
||||
};
|
||||
if *changed { tok.at(src.pos()) } else { src.clone() }
|
||||
}
|
||||
pub fn map_mactree_v<F: FnMut(MacTree) -> Option<MacTree>>(
|
||||
src: &[MacTree],
|
||||
changed: &mut bool,
|
||||
map: &mut F,
|
||||
) -> Vec<MacTree> {
|
||||
src.iter().map(|tree| ro(changed, |changed| map_mactree(tree, changed, map))).collect_vec()
|
||||
}
|
||||
|
||||
/// reverse "or". Inside, the flag is always false, but raising it will raise
|
||||
/// the outside flag too.
|
||||
fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
|
||||
@@ -241,7 +234,3 @@ fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
|
||||
*flag |= new_flag;
|
||||
val
|
||||
}
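// Usage sketch (hypothetical): `ro` lets a recursive map observe "something
// changed below me" without ever seeing an already-raised flag, e.g.
//   let child = ro(changed, |c| subtree.map(c, map));
// raises `*changed` only when the nested call raised `c`.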
|
||||
|
||||
pub fn glossary_v(src: &[MacTree]) -> impl Iterator<Item = Sym> {
|
||||
src.iter().flat_map(|mt| mt.glossary()).cloned()
|
||||
}
|
||||
|
||||
@@ -12,21 +12,21 @@ use orchid_extension::tree::{GenTok, GenTokTree, x_tok};
|
||||
|
||||
use crate::macros::instantiate_tpl::InstantiateTplCall;
|
||||
use crate::macros::let_line::parse_tok;
|
||||
use crate::macros::mactree::{MacTok, MacTree};
|
||||
use crate::macros::mactree::{MacTok, MacTree, MacTreeSeq};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct MacTreeLexer;
|
||||
impl Lexer for MacTreeLexer {
|
||||
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\''];
|
||||
async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
||||
async fn lex<'a>(tail: &'a str, lctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
||||
let Some(tail2) = tail.strip_prefix('\'') else {
|
||||
return Err(err_not_applicable(ctx.i()).await);
|
||||
return Err(err_not_applicable().await);
|
||||
};
|
||||
let tail3 = tail2.trim_start();
|
||||
let mut args = Vec::new();
|
||||
return match mac_tree(tail3, &mut args, ctx).await {
|
||||
return match mac_tree(tail3, &mut args, lctx).await {
|
||||
Ok((tail4, mactree)) => {
|
||||
let range = ctx.pos_tt(tail, tail4);
|
||||
let range = lctx.pos_tt(tail, tail4);
|
||||
let tok = match &args[..] {
|
||||
[] => x_tok(mactree).await,
|
||||
_ => {
|
||||
@@ -38,7 +38,7 @@ impl Lexer for MacTreeLexer {
|
||||
};
|
||||
Ok((tail4, tok.at(range)))
|
||||
},
|
||||
Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.pos_lt(1, tail2)))),
|
||||
Err(e) => Ok((tail2, GenTok::Bottom(e).at(lctx.pos_lt(1, tail2)))),
|
||||
};
|
||||
async fn mac_tree<'a>(
|
||||
tail: &'a str,
|
||||
@@ -51,7 +51,8 @@ impl Lexer for MacTreeLexer {
|
||||
return loop {
|
||||
let tail2 = body_tail.trim_start();
|
||||
if let Some(tail3) = tail2.strip_prefix(*rp) {
|
||||
break Ok((tail3, MacTok::S(*paren, items).at(ctx.pos_tt(tail, tail3).pos())));
|
||||
let tok = MacTok::S(*paren, MacTreeSeq::new(items));
|
||||
break Ok((tail3, tok.at(ctx.pos_tt(tail, tail3).pos())));
|
||||
} else if tail2.is_empty() {
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Unclosed block").await,
|
||||
@@ -83,7 +84,7 @@ impl Lexer for MacTreeLexer {
|
||||
body.push(body_tok);
|
||||
tail3 = tail5;
|
||||
}
|
||||
Ok((tail3, MacTok::Lambda(param, body).at(ctx.pos_tt(tail, tail3).pos())))
|
||||
Ok((tail3, MacTok::Lambda(param, MacTreeSeq::new(body)).at(ctx.pos_tt(tail, tail3).pos())))
|
||||
} else {
|
||||
let (tail2, sub) = ctx.recurse(tail).await?;
|
||||
let parsed = parse_tok(&sub, ctx).await.expect("Unexpected invalid token");
|
||||
|
||||
178
orchid-std/src/macros/match_macros.rs
Normal file
@@ -0,0 +1,178 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use async_fn_stream::stream;
|
||||
use futures::future::join_all;
|
||||
use futures::{Stream, StreamExt, stream};
|
||||
use never::Never;
|
||||
use orchid_api::ExprTicket;
|
||||
use orchid_api_derive::Coding;
|
||||
use orchid_base::error::{OrcRes, mk_errv};
|
||||
use orchid_base::format::fmt;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::sym;
|
||||
use orchid_extension::atom::{Atomic, TAtom};
|
||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::conv::ToExpr;
|
||||
use orchid_extension::coroutine_exec::{ExecHandle, exec};
|
||||
use orchid_extension::expr::{Expr, ExprHandle};
|
||||
use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref};
|
||||
use orchid_extension::tree::{GenMember, fun, prefix};
|
||||
|
||||
use crate::macros::resolve::resolve;
|
||||
use crate::macros::utils::{build_macro, mactree, mactreev};
|
||||
use crate::std::reflection::sym_atom::SymAtom;
|
||||
use crate::std::tuple::Tuple;
|
||||
use crate::{HomoTpl, MacTok, MacTree, OrcOpt, Tpl, UntypedTuple, api};
|
||||
|
||||
#[derive(Clone, Coding)]
|
||||
pub struct MatcherData {
|
||||
keys: Vec<api::TStrv>,
|
||||
matcher: ExprTicket,
|
||||
}
|
||||
impl MatcherData {
|
||||
async fn matcher(&self) -> Expr { Expr::from_handle(ExprHandle::from_ticket(self.matcher).await) }
|
||||
pub async fn run_matcher(
|
||||
&self,
|
||||
h: &mut ExecHandle<'_>,
|
||||
val: impl ToExpr,
|
||||
) -> OrcRes<OrcOpt<HomoTpl<Expr>>> {
|
||||
h.exec::<OrcOpt<HomoTpl<Expr>>>(call(self.matcher().await.to_gen().await, [val.to_gen().await]))
|
||||
.await
|
||||
}
|
||||
pub fn keys(&self) -> impl Stream<Item = Sym> {
|
||||
stream(async |mut h| {
|
||||
for tk in &self.keys {
|
||||
h.emit(Sym::from_api(*tk, &i()).await).await
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
#[derive(Clone)]
|
||||
pub struct MatcherAtom {
|
||||
/// The names that subresults may be bound to
|
||||
pub(super) keys: Vec<Sym>,
|
||||
/// Takes the value-to-be-matched, returns an `option (tuple T1..TN)` of the
|
||||
/// subresults to be bound to the names returned by [Self::keys]
|
||||
pub(super) matcher: Expr,
|
||||
}
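// The simplest matcher that satisfies this contract is pattern::ref_body
// further down: it always succeeds and returns `option (tuple value)`, so a
// MatcherAtom built from it binds the whole matched value to its single key.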
|
||||
impl Atomic for MatcherAtom {
|
||||
type Data = MatcherData;
|
||||
type Variant = OwnedVariant;
|
||||
}
|
||||
impl OwnedAtom for MatcherAtom {
|
||||
type Refs = Never;
|
||||
async fn val(&self) -> std::borrow::Cow<'_, Self::Data> {
|
||||
Cow::Owned(MatcherData {
|
||||
keys: self.keys.iter().map(|t| t.to_api()).collect(),
|
||||
matcher: self.matcher.handle().ticket(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn gen_match_macro_lib() -> Vec<GenMember> {
|
||||
prefix("pattern", [
|
||||
fun(
|
||||
true,
|
||||
"match_one",
|
||||
async |mat: TAtom<MatcherAtom>, value: Expr, then: Expr, default: Expr| {
|
||||
exec(async move |mut h| match mat.run_matcher(&mut h, value).await? {
|
||||
OrcOpt(Some(values)) =>
|
||||
Ok(call(then.to_gen().await, join_all(values.0.into_iter().map(|x| x.to_gen())).await)),
|
||||
OrcOpt(None) => Ok(default.to_gen().await),
|
||||
})
|
||||
.await
|
||||
},
|
||||
),
|
||||
fun(true, "matcher", async |names: HomoTpl<TAtom<SymAtom>>, matcher: Expr| MatcherAtom {
|
||||
keys: join_all(names.0.iter().map(async |atm| Sym::from_api(atm.0, &i()).await)).await,
|
||||
matcher,
|
||||
}),
|
||||
build_macro(None, ["match", "match_rule", "_row", "=>"])
|
||||
.rule(mactreev!("pattern::match" { "..$" rules 0 }), [async |[rules]| {
|
||||
exec(async move |mut h| {
|
||||
let rule_lines = h
|
||||
.exec::<TAtom<Tuple>>(call(sym_ref(sym!(macros::resolve; i())), [
|
||||
mactree!(macros::common::semi_list "push" rules.clone();).to_gen().await,
|
||||
]))
|
||||
.await?;
|
||||
let mut rule_atoms = Vec::<(TAtom<MatcherAtom>, Expr)>::new();
|
||||
for line_exprh in rule_lines.iter() {
|
||||
let line_mac = h
|
||||
.exec::<TAtom<MacTree>>(Expr::from_handle(ExprHandle::from_ticket(*line_exprh).await))
|
||||
.await?;
|
||||
let Tpl((matcher, body)) = h
|
||||
.exec(call(sym_ref(sym!(macros::resolve; i())), [
|
||||
mactree!(pattern::_row "push" own(&line_mac).await ;).to_gen().await,
|
||||
]))
|
||||
.await?;
|
||||
rule_atoms.push((matcher, body));
|
||||
}
|
||||
let base_case = lambda(0, [bot(mk_errv(
|
||||
i().i("No branches match").await,
|
||||
"None of the pattern provided matches this value",
|
||||
[rules.pos()],
|
||||
))]);
|
||||
let match_expr = stream::iter(rule_atoms.into_iter().rev())
|
||||
.fold(base_case, async |tail, (mat, body)| {
|
||||
lambda(0, [call(sym_ref(sym!(pattern::match_one; i())), [
|
||||
mat.to_gen().await,
|
||||
arg(0),
|
||||
body.to_gen().await,
|
||||
call(tail, [arg(0)]),
|
||||
])])
|
||||
})
|
||||
.await;
|
||||
Ok(match_expr)
|
||||
})
|
||||
.await
|
||||
}])
|
||||
.rule(mactreev!(pattern::match_rule (( "...$" pattern 0 ))), [async |[pattern]| {
|
||||
resolve(mactree!(pattern::match_rule "push" pattern; )).await
|
||||
}])
|
||||
.rule(mactreev!(pattern::_row ( "...$" pattern 0 pattern::=> "...$" value 1 )), [
|
||||
async |[pattern, mut value]| {
|
||||
exec(async move |mut h| -> OrcRes<Tpl<(TAtom<MatcherAtom>, GExpr)>> {
|
||||
let Ok(pat) = h
|
||||
.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
|
||||
mactree!(pattern::match_rule "push" pattern.clone();).to_gen().await,
|
||||
]))
|
||||
.await
|
||||
else {
|
||||
return Err(mk_errv(
|
||||
i().i("Invalid pattern").await,
|
||||
format!("Could not parse {} as a match pattern", fmt(&pattern, &i()).await),
|
||||
[pattern.pos()],
|
||||
));
|
||||
};
|
||||
value = (pat.keys())
|
||||
.fold(value, async |value, name| mactree!("l_" name; ( "push" value ; )))
|
||||
.await;
|
||||
Ok(Tpl((pat, resolve(value).await)))
|
||||
})
|
||||
.await
|
||||
},
|
||||
])
|
||||
.finish(),
|
||||
fun(true, "ref_body", async |val| OrcOpt(Some(UntypedTuple(vec![val])))),
|
||||
build_macro(None, ["ref"])
|
||||
.rule(mactreev!(pattern::match_rule(pattern::ref "$" name)), [async |[name]| {
|
||||
let MacTok::Name(name) = name.tok() else {
|
||||
return Err(mk_errv(
|
||||
i().i("pattern 'ref' requires a name to bind to").await,
|
||||
format!(
|
||||
"'ref' was interpreted as a binding matcher, \
|
||||
but it was followed by {} instead of a name",
|
||||
fmt(&name, &i()).await
|
||||
),
|
||||
[name.pos()],
|
||||
));
|
||||
};
|
||||
Ok(MatcherAtom {
|
||||
keys: vec![name.clone()],
|
||||
matcher: sym_ref(sym!(pattern::ref_body; i())).to_expr().await,
|
||||
})
|
||||
}])
|
||||
.finish(),
|
||||
])
|
||||
}
|
||||
@@ -6,10 +6,11 @@ pub mod macro_system;
|
||||
mod macro_value;
|
||||
pub mod mactree;
|
||||
mod mactree_lexer;
|
||||
pub mod match_macros;
|
||||
mod ph_lexer;
|
||||
mod requests;
|
||||
mod resolve;
|
||||
mod rule;
|
||||
pub mod std_macros;
|
||||
mod utils;
|
||||
|
||||
use mactree::{MacTok, MacTree};
|
||||
|
||||
@@ -4,8 +4,8 @@ use orchid_base::format::FmtUnit;
|
||||
use orchid_base::parse::{name_char, name_start};
|
||||
use orchid_extension::atom::Atomic;
|
||||
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
|
||||
use orchid_extension::system::SysCtx;
|
||||
use orchid_extension::tree::{GenTokTree, x_tok};
|
||||
|
||||
use crate::macros::mactree::{Ph, PhKind};
|
||||
@@ -13,17 +13,15 @@ use crate::macros::mactree::{Ph, PhKind};
|
||||
#[derive(Clone, Coding)]
|
||||
pub struct PhAtom(orchid_api::TStr, PhKind);
|
||||
impl PhAtom {
|
||||
pub async fn to_full(&self, ctx: &SysCtx) -> Ph {
|
||||
Ph { kind: self.1, name: ctx.i().ex(self.0).await }
|
||||
}
|
||||
pub async fn to_full(&self) -> Ph { Ph { kind: self.1, name: i().ex(self.0).await } }
|
||||
}
|
||||
impl Atomic for PhAtom {
|
||||
type Data = Self;
|
||||
type Variant = ThinVariant;
|
||||
}
|
||||
impl ThinAtom for PhAtom {
|
||||
async fn print(&self, ctx: SysCtx) -> FmtUnit {
|
||||
Ph { name: ctx.i().ex(self.0).await, kind: self.1 }.to_string().into()
|
||||
async fn print(&self) -> FmtUnit {
|
||||
Ph { name: i().ex(self.0).await, kind: self.1 }.to_string().into()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,7 +52,7 @@ impl Lexer for PhLexer {
|
||||
(prio_num, tail)
|
||||
} else {
|
||||
return Err(mk_errv(
|
||||
ctx.ctx.i().i("Invalid priority, must be 0-255").await,
|
||||
i().i("Invalid priority, must be 0-255").await,
|
||||
format!("{prio} is not a valid placeholder priority"),
|
||||
[ctx.pos_lt(prio.len(), tail)],
|
||||
));
|
||||
@@ -70,10 +68,10 @@ impl Lexer for PhLexer {
|
||||
let (name, priority, tail) = name_and_prio(tail, ctx).await?;
|
||||
(tail, name, PhKind::Vector { at_least_one: true, priority })
|
||||
} else {
|
||||
return Err(err_not_applicable(ctx.ctx.i()).await);
|
||||
return Err(err_not_applicable().await);
|
||||
}
|
||||
};
|
||||
let ph_atom = PhAtom(ctx.ctx.i().i::<String>(name).await.to_api(), phkind);
|
||||
let ph_atom = PhAtom(i().i::<String>(name).await.to_api(), phkind);
|
||||
Ok((tail, x_tok(ph_atom).await.at(ctx.pos_tt(line, tail))))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
use orchid_api_derive::{Coding, Hierarchy};
|
||||
use orchid_api_traits::Request;
|
||||
|
||||
use crate::api;
|
||||
use crate::macros::mactree::PhKind;
|
||||
|
||||
/* TODO:
|
||||
Create handlers and wrappers for these, probably expose MacTree to other crates.
|
||||
Define new extension binary to test the request functionality.
|
||||
*/
|
||||
|
||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
||||
#[extendable]
|
||||
pub enum MacroReq {
|
||||
CreateMacro(CreateMacro),
|
||||
CreateQuote(CreateQuote),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
||||
#[extends(MacroReq)]
|
||||
pub struct CreateMacro {
|
||||
pub module: api::TStrv,
|
||||
pub prio: Option<u64>,
|
||||
pub rules: Vec<CreateRule>,
|
||||
}
|
||||
impl Request for CreateMacro {
|
||||
type Response = api::ExprTicket;
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Coding)]
|
||||
pub struct CreateRule {
|
||||
pub pattern: Vec<api::TokenTree>,
|
||||
pub body_name: api::TStr,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Coding, Hierarchy)]
|
||||
#[extends(MacroReq)]
|
||||
pub struct CreateQuote(MsgMacTree);
|
||||
impl Request for CreateQuote {
|
||||
type Response = api::ExprTicket;
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Coding)]
|
||||
pub struct MsgMacTree {
|
||||
pub tok: MsgMacTok,
|
||||
pub location: api::Location,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Coding)]
|
||||
pub enum MsgMacTok {
|
||||
S(api::Paren, Vec<MsgMacTree>),
|
||||
Name(api::TStrv),
|
||||
/// Only permitted in arguments to `instantiate_tpl`
|
||||
Slot,
|
||||
Value(api::ExprTicket),
|
||||
Lambda(Box<MsgMacTree>, Vec<MsgMacTree>),
|
||||
/// Only permitted in "pattern" values produced by macro blocks, which are
|
||||
/// never accessed as variables by usercode
|
||||
Ph(MsgPh),
|
||||
Bottom(Vec<api::OrcError>),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Coding)]
|
||||
pub struct MsgPh {
|
||||
kind: PhKind,
|
||||
name: api::TStr,
|
||||
}
|
||||
@@ -1,91 +1,275 @@
|
||||
use futures::FutureExt;
|
||||
use hashbrown::HashMap;
|
||||
use std::ops::{Add, Range};
|
||||
|
||||
use async_fn_stream::stream;
|
||||
use futures::{FutureExt, StreamExt};
|
||||
use hashbrown::{HashMap, HashSet};
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::mk_errv;
|
||||
use orchid_base::format::fmt;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::sym;
|
||||
use orchid_base::tree::Paren;
|
||||
use orchid_extension::atom::TAtom;
|
||||
use orchid_extension::atom_owned::own;
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::conv::ToExpr;
|
||||
use orchid_extension::coroutine_exec::ExecHandle;
|
||||
use orchid_extension::gen_expr::{GExpr, call, sym_ref};
|
||||
use orchid_extension::system::SysCtx;
|
||||
use orchid_extension::coroutine_exec::{ExecHandle, exec};
|
||||
use orchid_extension::gen_expr::{GExpr, bot, call, lambda, sym_ref};
|
||||
use orchid_extension::reflection::{ReflMemKind, refl};
|
||||
use subslice_offset::SubsliceOffset;
|
||||
use substack::Substack;
|
||||
|
||||
use crate::macros::macro_value::{Macro, Rule};
|
||||
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
|
||||
use crate::macros::mactree::MacTreeSeq;
|
||||
use crate::macros::rule::state::{MatchState, StateEntry};
|
||||
use crate::{MacTok, MacTree};
|
||||
|
||||
pub struct ResolveCtx<'a> {
|
||||
pub ctx: SysCtx,
|
||||
pub h: ExecHandle<'a>,
|
||||
pub named: HashMap<Sym, Vec<(&'a NamedMatcher, &'a Macro, &'a Rule)>>,
|
||||
pub priod: Vec<(&'a PriodMatcher, &'a Macro, &'a Rule)>,
|
||||
pub async fn resolve(tpl: MacTree) -> GExpr {
|
||||
exec(async move |mut h| {
|
||||
let root = refl();
|
||||
let mut macros = HashMap::new();
|
||||
for n in tpl.glossary() {
|
||||
if let Ok(ReflMemKind::Const) = root.get_by_path(n).await.map(|m| m.kind()) {
|
||||
let Ok(mac) = h.exec::<TAtom<Macro>>(sym_ref(n.clone())).await else { continue };
|
||||
let mac = own(&mac).await;
|
||||
macros.entry(mac.canonical_name().await).or_insert(mac);
|
||||
}
|
||||
}
|
||||
let mut exclusive = Vec::new();
|
||||
let mut prios = Vec::<u64>::new();
|
||||
let mut priod = Vec::<FilteredMacroRecord>::new();
|
||||
for (_, mac) in macros.iter() {
|
||||
let mut record = FilteredMacroRecord { mac, rules: Vec::new() };
|
||||
for (rule_i, rule) in mac.0.rules.iter().enumerate() {
|
||||
if rule.pattern.glossary.is_subset(tpl.glossary()) {
|
||||
record.rules.push(rule_i);
|
||||
}
|
||||
}
|
||||
if !record.rules.is_empty() {
|
||||
match mac.0.prio {
|
||||
None => exclusive.push(record),
|
||||
Some(prio) => {
|
||||
let i = prios.partition_point(|p| *p > prio);
|
||||
prios.insert(i, prio);
|
||||
priod.insert(i, record);
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut rctx = ResolveCtx { h, exclusive, priod };
|
||||
resolve_one(&mut rctx, Substack::Bottom, &tpl).await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn resolve(ctx: &mut ResolveCtx<'_>, value: &MacTree) -> Option<MacTree> {
|
||||
/// Rules belonging to one macro that passed a particular filter
|
||||
pub struct FilteredMacroRecord<'a> {
|
||||
mac: &'a Macro,
|
||||
/// The rules in increasing order of index
|
||||
rules: Vec<usize>,
|
||||
}
|
||||
|
||||
struct ResolveCtx<'a> {
|
||||
pub h: ExecHandle<'a>,
|
||||
/// If these overlap, that's a compile-time error
|
||||
pub exclusive: Vec<FilteredMacroRecord<'a>>,
|
||||
/// If these overlap, the priorities decide the order. In case of a tie, the
|
||||
/// order is unspecified
|
||||
pub priod: Vec<FilteredMacroRecord<'a>>,
|
||||
}
|
||||
|
||||
async fn resolve_one(
|
||||
ctx: &mut ResolveCtx<'_>,
|
||||
arg_stk: Substack<'_, Sym>,
|
||||
value: &MacTree,
|
||||
) -> GExpr {
|
||||
match value.tok() {
|
||||
MacTok::Ph(_) | MacTok::Slot => panic!("Forbidden element in value mactree"),
|
||||
MacTok::Bottom(_) | MacTok::Value(_) | MacTok::Name(_) => None,
|
||||
MacTok::Lambda(arg, body) =>
|
||||
Some(MacTok::Lambda(arg.clone(), resolve_seq(ctx, body).await?).at(value.pos())),
|
||||
MacTok::S(ptyp, body) => Some(MacTok::S(*ptyp, resolve_seq(ctx, body).await?).at(value.pos())),
|
||||
MacTok::Bottom(err) => bot(err.clone()),
|
||||
MacTok::Value(v) => v.clone().to_gen().await,
|
||||
MacTok::Name(n) => sym_ref(n.clone()),
|
||||
MacTok::Lambda(arg, body) => {
|
||||
let MacTok::Name(name) = &*arg.tok else {
|
||||
return bot(mk_errv(
|
||||
i().i("Syntax error after macros").await,
|
||||
"This token ends up as a binding, consider replacing it with a name",
|
||||
[arg.pos()],
|
||||
));
|
||||
};
|
||||
let arg_pos = arg_stk.len() as u64;
|
||||
let arg_stk = arg_stk.push(name.clone());
|
||||
lambda(arg_pos, [resolve_seq(ctx, arg_stk, body.clone(), value.pos()).await])
|
||||
},
|
||||
MacTok::S(Paren::Round, body) => resolve_seq(ctx, arg_stk, body.clone(), value.pos()).await,
|
||||
MacTok::S(..) => bot(mk_errv(
|
||||
i().i("Leftover [] or {} not matched by macro").await,
|
||||
format!("{} was not matched by any macro", fmt(value, &i()).await),
|
||||
[value.pos()],
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn resolve_seq(ctx: &mut ResolveCtx<'_>, val: &[MacTree]) -> Option<Vec<MacTree>> {
|
||||
let mut any_changed = false;
|
||||
let mut i = 0;
|
||||
let mut val = val.to_vec();
|
||||
'all_named: while i < val.len() {
|
||||
'one_named: {
|
||||
let MacTok::Name(key) = val[i].tok() else { break 'one_named };
|
||||
let Some(options) = ctx.named.get(key) else { break 'one_named };
|
||||
let matches = (options.iter())
|
||||
.filter_map(|r| Some((r.1, r.2, r.0.apply(&val[i..], |_| false)?)))
|
||||
.collect_vec();
|
||||
match matches.len() {
|
||||
0 => break 'one_named,
|
||||
1 => {
|
||||
any_changed = true;
|
||||
let (mac, rule, (state, tail)) = matches.into_iter().exactly_one().unwrap();
|
||||
let end = val.len() - tail.len();
|
||||
let body_call = mk_body_call(mac, rule, &state, &ctx.ctx).await;
|
||||
std::mem::drop(state);
|
||||
val.splice(i..end, [MacTok::Value(ctx.h.register(body_call).await).at(Pos::None)]);
|
||||
i = end;
|
||||
},
|
||||
2.. => todo!("Named macros conflict!"),
|
||||
type XMatches<'a> = Vec<(Range<usize>, &'a Macro, &'a Rule, MatchState<'a>)>;
|
||||
|
||||
/// find the subsection of the slice that satisfies both the lower and upper
|
||||
/// limit.
|
||||
fn subsection<T>(
|
||||
slice: &[T],
|
||||
lower_limit: impl FnMut(&T) -> bool,
|
||||
mut upper_limit: impl FnMut(&T) -> bool,
|
||||
) -> Range<usize> {
|
||||
let start = slice.partition_point(lower_limit);
|
||||
let len = slice[start..].partition_point(|t| !upper_limit(t));
|
||||
start..start + len
|
||||
}
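// Sketch of how `subsection` carves out a window, using hypothetical values:
// it skips the prefix where `lower_limit` holds, then keeps taking elements
// until `upper_limit` first holds. Both closures must partition the slice for
// `partition_point` to be valid.
#[test]
fn subsection_window_sketch() {
  let xs = [1, 2, 3, 4, 5, 6];
  // skip while x < 3, then stop at the first x >= 5
  let win = subsection(&xs, |x| *x < 3, |x| *x >= 5);
  assert_eq!(win, 2..4); // selects the subslice [3, 4]
}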
|
||||
|
||||
async fn resolve_seq(
|
||||
ctx: &mut ResolveCtx<'_>,
|
||||
arg_stk: Substack<'_, Sym>,
|
||||
val: MacTreeSeq,
|
||||
fallback_pos: Pos,
|
||||
) -> GExpr {
|
||||
if val.items.is_empty() {
|
||||
return bot(mk_errv(
|
||||
i().i("Empty sequence").await,
|
||||
"() or (\\arg ) left after macro execution. \
|
||||
This is usually caused by an incomplete call to a macro with bad error detection",
|
||||
[fallback_pos],
|
||||
));
|
||||
}
|
||||
// A sorted collection of overlapping but non-nested matches to exclusive
|
||||
// macros
|
||||
let mut x_matches: XMatches = Vec::new();
|
||||
let top_glossary = val.top_glossary.clone();
|
||||
let mut new_val = val.items.to_vec();
|
||||
'x_macros: for x in &ctx.exclusive {
|
||||
let mut rules_iter = x.rules.iter();
|
||||
let ((before, state, after), rule) = 'rules: loop {
|
||||
let Some(ridx) = rules_iter.next() else { continue 'x_macros };
|
||||
let rule = &x.mac.0.rules[*ridx];
|
||||
if rule.pattern.top_glossary.is_subset(&top_glossary)
|
||||
&& let Some(record) = rule.matcher.apply(&val.items[..], &|_| true).await
|
||||
{
|
||||
break 'rules (record, rule);
|
||||
};
|
||||
};
|
||||
let new_r = (before.len()..new_val.len() - after.len(), x.mac, rule, state);
|
||||
// elements that overlap with us
|
||||
let overlap =
|
||||
subsection(&x_matches[..], |r| new_r.0.start < r.0.end, |r| r.0.start < new_r.0.end);
|
||||
let overlapping = &x_matches[overlap.clone()];
|
||||
// elements that fully contain us
|
||||
let geq_range =
|
||||
subsection(overlapping, |r| r.0.start <= new_r.0.start, |r| new_r.0.end <= r.0.end);
|
||||
let geq = &overlapping[geq_range.clone()];
|
||||
// if any of these is equal to us, all of them must be, otherwise the larger
|
||||
// ranges would have overridden the smaller ones
|
||||
if let Some(example) = geq.first() {
|
||||
// if they are equal to us, record the conflict.
|
||||
if example.0 == new_r.0 {
|
||||
let idx = (x_matches.subslice_offset(geq))
|
||||
.expect("this slice is statically derived from x_matches");
|
||||
x_matches.insert(idx, new_r);
|
||||
}
|
||||
continue 'all_named;
|
||||
// either way, we matched so no further rules can run.
|
||||
continue 'x_macros;
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
for (matcher, mac, rule) in &ctx.priod {
|
||||
let Some(state) = matcher.apply(&val, |_| false) else { continue };
|
||||
return Some(vec![
|
||||
MacTok::Value(ctx.h.register(mk_body_call(mac, rule, &state, &ctx.ctx).await).await)
|
||||
.at(Pos::None),
|
||||
]);
|
||||
}
|
||||
for expr in val.iter_mut() {
|
||||
if let Some(new) = resolve(ctx, expr).boxed_local().await {
|
||||
*expr = new;
|
||||
any_changed = true;
|
||||
// elements we fully contain. Equal ranges have been handled above
|
||||
let lt_range =
|
||||
subsection(overlapping, |r| new_r.0.start <= r.0.start, |r| r.0.end <= new_r.0.end);
|
||||
let lt = &overlapping[lt_range.clone()];
|
||||
if lt.is_empty() {
|
||||
// an empty range
|
||||
let i = x_matches.partition_point(|r| r.0.start < new_r.0.start);
|
||||
x_matches.insert(i, new_r);
|
||||
} else {
|
||||
let lt_start =
|
||||
x_matches.subslice_offset(overlapping).expect("Slice statically derived from x_matches");
|
||||
x_matches.splice(lt_start..lt_start + lt_range.len(), [new_r]);
|
||||
}
|
||||
}
|
||||
if any_changed { Some(val) } else { None }
|
||||
// apply exclusive matches
|
||||
if !x_matches.is_empty() {
|
||||
// ranges of indices into x_matches which setwise conflict with each other.
|
||||
// Pairwise conflict reporting is excess noise, but a single conflict error
|
||||
// doesn't reveal where within the parenthesized block to look, so it's easiest
|
||||
// to group them setwise even if these sets may associate macros which don't
|
||||
// directly conflict.
|
||||
let conflict_sets = (0..x_matches.len()).map(|x| x..x + 1).coalesce(|lran, rran| {
|
||||
// each index was mapped to a range that contains only itself. Now we check if
|
||||
// the last match in the first range overlaps the first match in the second
|
||||
// range, and combine them if this is the case.
|
||||
if x_matches[rran.start].0.start < x_matches[lran.end].0.end {
|
||||
Ok(lran.start..rran.end)
|
||||
} else {
|
||||
Err((lran, rran))
|
||||
}
|
||||
});
|
||||
let mac_conflict_tk = i().i("Macro conflict").await;
|
||||
let error = conflict_sets
|
||||
.filter(|r| 1 < r.len())
|
||||
.map(|set| {
|
||||
mk_errv(
|
||||
mac_conflict_tk.clone(),
|
||||
"Multiple partially overlapping syntax elements detected. \n\
|
||||
Try parenthesizing whichever side is supposed to be the subexpression.",
|
||||
x_matches[set].iter().flat_map(|rec| rec.3.names()).flat_map(|name| name.1).cloned(),
|
||||
)
|
||||
})
|
||||
.reduce(|l, r| l + r);
|
||||
if let Some(error) = error {
|
||||
return bot(error);
|
||||
}
|
||||
// no conflicts, apply all exclusive matches
|
||||
for (range, mac, rule, state) in x_matches.into_iter().rev() {
|
||||
// backwards so that the non-overlapping ranges remain valid
|
||||
let pos = (state.names().flat_map(|r| r.1).cloned().reduce(Pos::add))
|
||||
.expect("All macro rules must contain at least one locally defined name");
|
||||
let subex = ctx.h.register(mk_body_call(mac, rule, &state, pos.clone()).await).await;
|
||||
new_val.splice(range, [MacTok::Value(subex).at(pos)]);
|
||||
}
|
||||
};
|
||||
// Does this glossary refresh actually pay off?
|
||||
let top_glossary = (new_val.iter())
|
||||
.flat_map(|t| if let MacTok::Name(t) = t.tok() { Some(t.clone()) } else { None })
|
||||
.collect::<HashSet<_>>();
|
||||
for FilteredMacroRecord { mac, rules } in &ctx.priod {
|
||||
for ridx in rules {
|
||||
let rule = &mac.0.rules[*ridx];
|
||||
if !rule.pattern.top_glossary.is_subset(&top_glossary) {
|
||||
continue;
|
||||
}
|
||||
let Some((pre, state, suf)) = rule.matcher.apply(&new_val, &|_| true).await else { continue };
|
||||
let range = pre.len()..new_val.len() - suf.len();
|
||||
let pos = (state.names().flat_map(|pair| pair.1).cloned().reduce(Pos::add))
|
||||
.expect("All macro rules must contain at least one locally defined name");
|
||||
let subex = ctx.h.register(mk_body_call(mac, rule, &state, pos.clone()).await).await;
|
||||
std::mem::drop(state);
|
||||
new_val.splice(range, [MacTok::Value(subex).at(pos)]);
|
||||
}
|
||||
}
|
||||
let exprs = stream(async |mut h| {
|
||||
for mt in new_val {
|
||||
h.emit(resolve_one(ctx, arg_stk.clone(), &mt).await).await
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.boxed_local()
|
||||
.await;
|
||||
exprs.into_iter().reduce(|f, x| call(f, [x])).expect(
|
||||
"We checked first that it isn't empty, and named macros get replaced with their results",
|
||||
)
|
||||
}
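// The fold above applies the head of the resolved sequence to each subsequent
// item in turn; e.g. a hypothetical resolved sequence [f, a, b] reduces to
// call(call(f, [a]), [b]).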
|
||||
|
||||
async fn mk_body_call(mac: &Macro, rule: &Rule, state: &MatchState<'_>, ctx: &SysCtx) -> GExpr {
|
||||
async fn mk_body_call(mac: &Macro, rule: &Rule, state: &MatchState<'_>, pos: Pos) -> GExpr {
|
||||
let mut call_args = vec![];
|
||||
for name in rule.placeholders.iter() {
|
||||
call_args.push(match state.get(name).expect("Missing state entry for placeholder") {
|
||||
StateEntry::Scalar(scal) => (**scal).clone().to_expr().await,
|
||||
StateEntry::Vec(vec) => MacTok::S(Paren::Round, vec.to_vec()).at(Pos::None).to_expr().await,
|
||||
StateEntry::Scalar(scal) => (**scal).clone().to_gen().await,
|
||||
StateEntry::Vec(vec) =>
|
||||
MacTok::S(Paren::Round, MacTreeSeq::new(vec.iter().cloned())).at(Pos::None).to_gen().await,
|
||||
});
|
||||
}
|
||||
call(sym_ref(sym!(macros::lower; ctx.i()).await), [call(
|
||||
sym_ref(mac.0.module.suffix([rule.body_name.clone()], ctx.i()).await),
|
||||
call_args,
|
||||
)])
|
||||
call(sym_ref(mac.0.module.suffix([rule.body_name.clone()], &i()).await), call_args)
|
||||
.at(pos.clone())
|
||||
}
|
||||
|
||||
@@ -2,9 +2,10 @@ use futures::FutureExt;
|
||||
use futures::future::join_all;
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::{OrcRes, mk_errv};
|
||||
use orchid_base::interner::{Interner, Tok};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::join_ok;
|
||||
use orchid_base::side::Side;
|
||||
use orchid_extension::context::i;
|
||||
|
||||
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
|
||||
use super::vec_attrs::vec_attrs;
|
||||
@@ -31,29 +32,29 @@ fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
|
||||
iter.take_while(|expr| vec_attrs(expr).is_none()).count()
|
||||
}
|
||||
|
||||
pub async fn mk_any(pattern: &[MacTree], i: &Interner) -> OrcRes<AnyMatcher> {
|
||||
pub async fn mk_any(pattern: &[MacTree]) -> OrcRes<AnyMatcher> {
|
||||
let left_split = scal_cnt(pattern.iter());
|
||||
if pattern.len() <= left_split {
|
||||
return Ok(AnyMatcher::Scalar(mk_scalv(pattern, i).await?));
|
||||
return Ok(AnyMatcher::Scalar(mk_scalv(pattern).await?));
|
||||
}
|
||||
let (left, not_left) = pattern.split_at(left_split);
|
||||
let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
|
||||
let (mid, right) = not_left.split_at(right_split);
|
||||
join_ok! {
|
||||
left = mk_scalv(left, i).await;
|
||||
mid = mk_vec(mid, i).await;
|
||||
right = mk_scalv(right, i).await;
|
||||
left = mk_scalv(left).await;
|
||||
mid = mk_vec(mid).await;
|
||||
right = mk_scalv(right).await;
|
||||
}
|
||||
Ok(AnyMatcher::Vec { left, mid, right })
|
||||
}
|
||||
|
||||
/// Pattern MUST NOT contain vectorial placeholders
|
||||
async fn mk_scalv(pattern: &[MacTree], i: &Interner) -> OrcRes<Vec<ScalMatcher>> {
|
||||
join_all(pattern.iter().map(|pat| mk_scalar(pat, i))).await.into_iter().collect()
|
||||
async fn mk_scalv(pattern: &[MacTree]) -> OrcRes<Vec<ScalMatcher>> {
|
||||
join_all(pattern.iter().map(mk_scalar)).await.into_iter().collect()
|
||||
}
|
||||
|
||||
/// Pattern MUST start and end with a vectorial placeholder
|
||||
pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
|
||||
pub async fn mk_vec(pattern: &[MacTree]) -> OrcRes<VecMatcher> {
|
||||
debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
|
||||
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
|
||||
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
|
||||
@@ -68,8 +69,8 @@ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
|
||||
(&[], &[]) => Ok(VecMatcher::Placeh { key, nonzero }),
|
||||
(&[], _) => {
|
||||
join_ok! {
|
||||
sep = mk_scalv(r_sep, i).await;
|
||||
right = mk_vec(r_side, i).boxed_local().await;
|
||||
sep = mk_scalv(r_sep).await;
|
||||
right = mk_vec(r_side).boxed_local().await;
|
||||
}
|
||||
Ok(VecMatcher::Scan {
|
||||
direction: Side::Left,
|
||||
@@ -80,8 +81,8 @@ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
|
||||
},
|
||||
(_, &[]) => {
|
||||
join_ok! {
|
||||
left = mk_vec(l_side, i).boxed_local().await;
|
||||
sep = mk_scalv(l_sep, i).await;
|
||||
left = mk_vec(l_side).boxed_local().await;
|
||||
sep = mk_scalv(l_sep).await;
|
||||
}
|
||||
Ok(VecMatcher::Scan {
|
||||
direction: Side::Right,
|
||||
@@ -95,10 +96,10 @@ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
|
||||
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
|
||||
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
|
||||
join_ok! {
|
||||
left = mk_vec(l_side, i).boxed_local().await;
|
||||
left_sep = mk_scalv(l_sep, i).await;
|
||||
right_sep = mk_scalv(r_sep, i).await;
|
||||
right = mk_vec(r_side, i).boxed_local().await;
|
||||
left = mk_vec(l_side).boxed_local().await;
|
||||
left_sep = mk_scalv(l_sep).await;
|
||||
right_sep = mk_scalv(r_sep).await;
|
||||
right = mk_vec(r_side).boxed_local().await;
|
||||
}
|
||||
Ok(VecMatcher::Middle {
|
||||
left: Box::new(left),
|
||||
@@ -113,7 +114,7 @@ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
|
||||
}
|
||||
|
||||
/// Pattern MUST NOT be a vectorial placeholder
|
||||
async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> {
|
||||
async fn mk_scalar(pattern: &MacTree) -> OrcRes<ScalMatcher> {
|
||||
Ok(match &*pattern.tok {
|
||||
MacTok::Name(n) => ScalMatcher::Name(n.clone()),
|
||||
MacTok::Ph(Ph { name, kind }) => match kind {
|
||||
@@ -122,10 +123,10 @@ async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> {
|
||||
},
|
||||
PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
|
||||
},
|
||||
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body, i).boxed_local().await?)),
|
||||
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(&body.items).boxed_local().await?)),
|
||||
MacTok::Lambda(..) =>
|
||||
return Err(mk_errv(
|
||||
i.i("Lambda in matcher").await,
|
||||
i().i("Lambda in matcher").await,
|
||||
"Lambdas can't be matched for, only generated in templates",
|
||||
[pattern.pos()],
|
||||
)),
|
||||
@@ -136,50 +137,52 @@ async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::location::SrcRange;
|
||||
use orchid_base::sym;
|
||||
use orchid_base::tokens::Paren;
|
||||
use orchid_extension::context::{i, mock_ctx, with_ctx};
|
||||
use test_executors::spin_on;
|
||||
|
||||
use super::mk_any;
|
||||
use crate::macros::MacTok;
|
||||
use crate::macros::mactree::{Ph, PhKind};
|
||||
use crate::macros::mactree::{MacTreeSeq, Ph, PhKind};
|
||||
|
||||
#[test]
|
||||
fn test_scan() {
|
||||
spin_on(async {
|
||||
let i = Interner::new_master();
|
||||
let ex = |tok: MacTok| async { tok.at(SrcRange::mock(&i).await.pos()) };
|
||||
spin_on(with_ctx(mock_ctx(), async {
|
||||
let ex = |tok: MacTok| async { tok.at(SrcRange::mock(&i()).await.pos()) };
|
||||
let pattern = vec![
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||
name: i.i("::prefix").await,
|
||||
name: i().i("::prefix").await,
|
||||
}))
|
||||
.await,
|
||||
ex(MacTok::Name(sym!(prelude::do; i).await)).await,
|
||||
ex(MacTok::S(Paren::Round, vec![
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||
name: i.i("expr").await,
|
||||
}))
|
||||
.await,
|
||||
ex(MacTok::Name(sym!(prelude::; ; i).await)).await,
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 1, at_least_one: false },
|
||||
name: i.i("rest").await,
|
||||
}))
|
||||
.await,
|
||||
]))
|
||||
ex(MacTok::Name(sym!(prelude::do; i()))).await,
|
||||
ex(MacTok::S(
|
||||
Paren::Round,
|
||||
MacTreeSeq::new([
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||
name: i().i("expr").await,
|
||||
}))
|
||||
.await,
|
||||
ex(MacTok::Name(sym!(prelude::; ; i()))).await,
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 1, at_least_one: false },
|
||||
name: i().i("rest").await,
|
||||
}))
|
||||
.await,
|
||||
]),
|
||||
))
|
||||
.await,
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||
name: i.i("::suffix").await,
|
||||
name: i().i("::suffix").await,
|
||||
}))
|
||||
.await,
|
||||
];
|
||||
let matcher = mk_any(&pattern, &i).await.expect("This matcher isn't broken");
|
||||
let matcher = mk_any(&pattern).await.expect("This matcher isn't broken");
|
||||
println!("{matcher}");
|
||||
})
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,87 +1,61 @@
|
||||
use std::fmt;
|
||||
use std::rc::Rc;
|
||||
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::OrcRes;
|
||||
use orchid_base::interner::{Interner, Tok};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_extension::context::i;
|
||||
|
||||
use super::any_match::any_match;
|
||||
use super::build::{mk_any, mk_vec};
|
||||
use super::shared::{AnyMatcher, VecMatcher};
|
||||
use super::build::mk_any;
|
||||
use super::shared::AnyMatcher;
|
||||
use super::state::{MatchState, StateEntry};
|
||||
use super::vec_attrs::vec_attrs;
|
||||
use super::vec_match::vec_match;
|
||||
use crate::macros::mactree::{Ph, PhKind};
|
||||
use crate::macros::mactree::{MacTreeSeq, Ph, PhKind};
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
|
||||
pub struct NamedMatcher {
|
||||
pub struct Matcher {
|
||||
inner: AnyMatcher,
|
||||
head: Sym,
|
||||
after_tok: Tok<String>,
|
||||
}
|
||||
impl NamedMatcher {
|
||||
pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
|
||||
let head = match pattern.first().map(|tree| tree.tok()) {
|
||||
Some(MacTok::Name(name)) => name.clone(),
|
||||
_ => panic!("Named matchers must begin with a name"),
|
||||
};
|
||||
let after_tok = i.i("::after").await;
|
||||
let inner = match pattern.last().and_then(vec_attrs).is_some() {
|
||||
true => mk_any(pattern, i).await?,
|
||||
false => {
|
||||
let kind = PhKind::Vector { priority: 0, at_least_one: false };
|
||||
let suffix = [MacTok::Ph(Ph { name: after_tok.clone(), kind }).at(Pos::None)];
|
||||
mk_any(&pattern.iter().cloned().chain(suffix).collect_vec(), i).await?
|
||||
},
|
||||
};
|
||||
Ok(Self { after_tok, inner, head })
|
||||
}
|
||||
pub fn head(&self) -> Sym { self.head.clone() }
|
||||
/// Also returns the tail, if any, which should be matched further
|
||||
/// Note that due to how priod works below, the main usable information from
|
||||
/// the tail is its length
|
||||
pub fn apply<'a>(
|
||||
&self,
|
||||
seq: &'a [MacTree],
|
||||
save_loc: impl Fn(Sym) -> bool,
|
||||
) -> Option<(MatchState<'a>, &'a [MacTree])> {
|
||||
let mut state = any_match(&self.inner, seq, &save_loc)?;
|
||||
match state.remove(self.after_tok.clone()) {
|
||||
Some(StateEntry::Scalar(_)) => panic!("{} can never be a scalar entry!", self.after_tok),
|
||||
Some(StateEntry::Vec(v)) => Some((state, v)),
|
||||
None => Some((state, &[][..])),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl fmt::Display for NamedMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) }
|
||||
}
|
||||
impl fmt::Debug for NamedMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
|
||||
}
|
||||
|
||||
pub struct PriodMatcher(VecMatcher);
|
||||
impl PriodMatcher {
|
||||
pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
|
||||
assert!(
|
||||
pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(),
|
||||
"Prioritized matchers must start and end with a vectorial",
|
||||
);
|
||||
Ok(Self(mk_vec(pattern, i).await?))
|
||||
impl Matcher {
|
||||
pub async fn new(pattern: MacTreeSeq) -> OrcRes<Self> {
|
||||
let mut pattern = Rc::unwrap_or_clone(pattern.items);
|
||||
let kind = PhKind::Vector { at_least_one: false, priority: 0 };
|
||||
let first = pattern.first().expect("Empty pattern is not allowed");
|
||||
if vec_attrs(first).is_none() {
|
||||
let pos = first.pos();
|
||||
pattern.insert(0, MacTok::Ph(Ph { name: i().i("::before").await, kind }).at(pos));
|
||||
}
|
||||
let last = pattern.last().expect("first returned Some above");
|
||||
if vec_attrs(last).is_none() {
|
||||
let pos = last.pos();
|
||||
pattern.insert(0, MacTok::Ph(Ph { name: i().i("::after").await, kind }).at(pos));
|
||||
}
|
||||
Ok(Matcher { inner: mk_any(&pattern).await? })
|
||||
}
|
||||
/// tokens before the offset always match the prefix
|
||||
pub fn apply<'a>(
|
||||
/// Also returns the head and tail, which should be matched by overarching
|
||||
/// matchers attempted later.
|
||||
pub async fn apply<'a>(
|
||||
&self,
|
||||
seq: &'a [MacTree],
|
||||
save_loc: impl Fn(Sym) -> bool,
|
||||
) -> Option<MatchState<'a>> {
|
||||
vec_match(&self.0, seq, &save_loc)
|
||||
save_loc: &dyn Fn(Sym) -> bool,
|
||||
) -> Option<(&'a [MacTree], MatchState<'a>, &'a [MacTree])> {
|
||||
let mut result = any_match(&self.inner, seq, &save_loc)?;
|
||||
async fn remove_frame<'a>(result: &mut MatchState<'a>, key: &str) -> &'a [MacTree] {
|
||||
match result.remove(i().i(key).await) {
|
||||
Some(StateEntry::Scalar(_)) => panic!("{key} is defined in the constructor as a Vec"),
|
||||
Some(StateEntry::Vec(v)) => v,
|
||||
None => &[],
|
||||
}
|
||||
}
|
||||
let before = remove_frame(&mut result, "::before").await;
|
||||
let after = remove_frame(&mut result, "::after").await;
|
||||
Some((before, result, after))
|
||||
}
|
||||
}
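// Sketch of consuming `Matcher::apply` (hypothetical `matcher` and `seq`):
//   if let Some((before, state, after)) = matcher.apply(&seq, &|_| false).await {
//     // `before` / `after` are the ::before / ::after frames the pattern did
//     // not claim; `state` holds the placeholder captures for the body call.
//   }
// An enclosing matcher attempted later is expected to receive `before` and
// `after`.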
|
||||
impl fmt::Display for PriodMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
|
||||
impl fmt::Display for Matcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) }
|
||||
}
|
||||
impl fmt::Debug for PriodMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PriodMatcher({self})") }
|
||||
impl fmt::Debug for Matcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
|
||||
}
|
||||
|
||||
@@ -19,7 +19,7 @@ pub fn scal_match<'a>(
|
||||
(ScalMatcher::Placeh { key }, _) =>
|
||||
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
|
||||
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
|
||||
any_match(b_mat, &body[..], save_loc),
|
||||
any_match(b_mat, &body.items, save_loc),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,6 +54,9 @@ impl<'a> MatchState<'a> {
|
||||
pub fn from_name(name: Sym, location: Pos) -> Self {
|
||||
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
|
||||
}
|
||||
pub fn names(&self) -> impl Iterator<Item = (Sym, &[Pos])> {
|
||||
self.name_posv.iter().map(|(sym, vec)| (sym.clone(), &vec[..]))
|
||||
}
|
||||
pub fn get(&self, key: &Tok<String>) -> Option<&StateEntry<'a>> { self.placeholders.get(key) }
|
||||
pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> {
|
||||
self.placeholders.remove(&name)
|
||||
|
||||
orchid-std/src/macros/std_macros.rs (Normal file, 177 lines)
@@ -0,0 +1,177 @@
|
||||
use futures::{StreamExt, stream};
|
||||
use orchid_base::error::OrcRes;
|
||||
use orchid_base::sym;
|
||||
use orchid_extension::atom::TAtom;
|
||||
use orchid_extension::atom_owned::own;
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::conv::ToExpr;
|
||||
use orchid_extension::coroutine_exec::exec;
|
||||
use orchid_extension::expr::Expr;
|
||||
use orchid_extension::gen_expr::{GExpr, call, sym_ref};
|
||||
use orchid_extension::tree::{GenMember, fun, prefix};
|
||||
|
||||
use crate::macros::match_macros::MatcherAtom;
|
||||
use crate::macros::resolve::resolve;
|
||||
use crate::macros::utils::{build_macro, mactree, mactreev};
|
||||
use crate::{HomoTpl, MacTree, OrcOpt, Tpl};
|
||||
|
||||
pub async fn gen_std_macro_lib() -> Vec<GenMember> {
|
||||
prefix("std", [
|
||||
prefix("option", [
|
||||
fun(false, "is_some_body", |sub: TAtom<MatcherAtom>, val: OrcOpt<Expr>| {
|
||||
exec(async move |mut h| {
|
||||
let Some(sub_val) = val.0 else { return Ok(OrcOpt(None)) };
|
||||
h.exec::<OrcOpt<Expr>>(call(sub.to_gen().await, [sub_val.to_gen().await])).await
|
||||
})
|
||||
}),
|
||||
fun(false, "is_none_body", async |val: OrcOpt<Expr>| {
|
||||
if val.0.is_none() { OrcOpt(Some(Tpl(()))) } else { OrcOpt(None) }
|
||||
}),
|
||||
build_macro(None, ["of", "empty"])
|
||||
.rule(mactreev!(pattern::match_rule ( std::option::of "...$" sub_pattern 0)), [
|
||||
|[sub]: [_; _]| {
|
||||
exec(async move |mut h| {
|
||||
let sub = h
|
||||
.exec::<TAtom<MatcherAtom>>(
|
||||
resolve(mactree!(pattern::match_rule "push" sub;)).await,
|
||||
)
|
||||
.await?;
|
||||
Ok(MatcherAtom {
|
||||
keys: sub.keys().collect().await,
|
||||
matcher: h
|
||||
.register(call(sym_ref(sym!(std::option::is_some_body; i())), [sub
|
||||
.to_gen()
|
||||
.await]))
|
||||
.await,
|
||||
})
|
||||
})
|
||||
},
|
||||
])
|
||||
.rule(mactreev!(pattern::match_rule(std::option::empty)), [|[]: [_; _]| {
|
||||
exec(async |mut h| {
|
||||
Ok(MatcherAtom {
|
||||
keys: vec![],
|
||||
matcher: h.register(sym_ref(sym!(std::option::is_none_body; i()))).await,
|
||||
})
|
||||
})
|
||||
}])
|
||||
.finish(),
|
||||
]),
|
||||
prefix("tuple", [
|
||||
build_macro(None, ["t"])
|
||||
.rule(mactreev!(std::tuple::t [ "...$" elements 0 ]), [|[elements]: [_; _]| {
|
||||
exec(async move |mut h| {
|
||||
let tup = h
|
||||
.exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve; i())), [
|
||||
mactree!((macros::common::comma_list "push" elements ;)).to_gen().await,
|
||||
]))
|
||||
.await?;
|
||||
let val = stream::iter(&tup.0[..])
|
||||
.fold(sym_ref(sym!(std::tuple::empty; i())), async |head, new| {
|
||||
call(sym_ref(sym!(std::tuple::cat; i())), [
|
||||
head,
|
||||
call(sym_ref(sym!(std::tuple::one; i())), [call(
|
||||
sym_ref(sym!(macros::resolve; i())),
|
||||
[new.clone().to_gen().await],
|
||||
)]),
|
||||
])
|
||||
})
|
||||
.await;
|
||||
Ok(val)
|
||||
})
|
||||
}])
|
||||
.rule(
|
||||
mactreev!(pattern::match_rule(std::tuple::t[ "...$" elements 0 macros::common::..])),
|
||||
[async |[elements]: [_; _]| parse_tpl(elements, Some(mactree!(macros::common::_))).await],
|
||||
)
|
||||
.rule(
|
||||
mactreev!(pattern::match_rule(
|
||||
std::tuple::t[ "...$" elements 1 macros::common::.. "...$" tail 0]
|
||||
)),
|
||||
[async |[elements, tail]: [_; _]| parse_tpl(elements, Some(tail)).await],
|
||||
)
|
||||
.rule(mactreev!(pattern::match_rule(std::tuple::t[ "...$" elements 0])), [
|
||||
|[elements]: [_; _]| parse_tpl(elements, None),
|
||||
])
|
||||
.finish(),
|
||||
fun(false, "matcher_body", tuple_matcher_body),
|
||||
]),
|
||||
])
|
||||
}
|
||||
|
||||
fn parse_tpl(elements: MacTree, tail_matcher: Option<MacTree>) -> impl Future<Output = GExpr> {
|
||||
exec(async move |mut h| -> OrcRes<MatcherAtom> {
|
||||
let tup = h
|
||||
.exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve; i())), [
|
||||
mactree!((macros::common::comma_list "push" elements ;)).to_gen().await,
|
||||
]))
|
||||
.await?;
|
||||
let mut subs = Vec::with_capacity(tup.0.len());
|
||||
for mac_a in &tup.0[..] {
|
||||
let mac = own(mac_a).await;
|
||||
let sub = h
|
||||
.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
|
||||
mactree!(pattern::match_rule "push" mac ;).to_gen().await,
|
||||
]))
|
||||
.await?;
|
||||
subs.push(sub);
|
||||
}
|
||||
let tail_matcher = match tail_matcher {
|
||||
Some(mac) => Some(
|
||||
h.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [
|
||||
mactree!(pattern::match_rule "push" mac ;).to_gen().await,
|
||||
]))
|
||||
.await?,
|
||||
),
|
||||
None => None,
|
||||
};
|
||||
Ok(MatcherAtom {
|
||||
keys: stream::iter(&subs[..])
|
||||
.flat_map(|t| t.keys())
|
||||
.chain(stream::iter(&tail_matcher).flat_map(|mat| mat.keys()))
|
||||
.collect()
|
||||
.await,
|
||||
matcher: call(sym_ref(sym!(std::tuple::matcher_body; i())), [
|
||||
HomoTpl(subs).to_gen().await,
|
||||
OrcOpt(tail_matcher).to_gen().await,
|
||||
])
|
||||
.to_expr()
|
||||
.await,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn tuple_matcher_body(
|
||||
children: HomoTpl<TAtom<MatcherAtom>>,
|
||||
tail: OrcOpt<TAtom<MatcherAtom>>,
|
||||
value: HomoTpl<Expr>,
|
||||
) -> impl Future<Output = GExpr> {
|
||||
exec(async move |mut h| -> OrcRes<OrcOpt<GExpr>> {
|
||||
if value.0.len() < children.0.len() {
|
||||
return Ok(OrcOpt(None));
|
||||
}
|
||||
let mut binds = Vec::new();
|
||||
for (sub_mat, sub_val) in children.0.iter().zip(&value.0) {
|
||||
match sub_mat.run_matcher(&mut h, sub_val.clone()).await? {
|
||||
OrcOpt(None) => return Ok(OrcOpt(None)),
|
||||
OrcOpt(Some(subres)) => binds.extend(subres.0),
|
||||
}
|
||||
}
|
||||
match tail.0 {
|
||||
None if children.0.len() < value.0.len() => return Ok(OrcOpt(None)),
|
||||
None => (),
|
||||
Some(tail_mat) => {
|
||||
let tail_tpl = stream::iter(&value.0[children.0.len()..])
|
||||
.fold(sym_ref(sym!(std::tuple::empty; i())), async |prefix, new| {
|
||||
call(sym_ref(sym!(std::tuple::cat; i())), [prefix, new.clone().to_gen().await])
|
||||
})
|
||||
.await;
|
||||
match tail_mat.run_matcher(&mut h, tail_tpl).await? {
|
||||
OrcOpt(Some(tail_binds)) => binds.extend(tail_binds.0),
|
||||
OrcOpt(None) => return Ok(OrcOpt(None)),
|
||||
}
|
||||
},
|
||||
};
|
||||
todo!()
|
||||
})
|
||||
}
|
||||
@@ -1,166 +1,275 @@
|
||||
use std::borrow::Cow;
|
||||
use std::rc::Rc;
|
||||
|
||||
use async_fn_stream::stream;
|
||||
use futures::StreamExt;
|
||||
use futures::future::LocalBoxFuture;
|
||||
use itertools::{Itertools, chain};
|
||||
use never::Never;
|
||||
use orchid_base::name::{NameLike, Sym, VPath};
|
||||
use orchid_extension::atom::{Atomic, TAtom};
|
||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
|
||||
use orchid_extension::context::i;
|
||||
use orchid_extension::conv::ToExpr;
|
||||
use orchid_extension::gen_expr::sym_ref;
|
||||
use orchid_extension::gen_expr::{GExpr, sym_ref};
|
||||
use orchid_extension::tree::{GenMember, MemKind, cnst, lazy};
|
||||
|
||||
use crate::macros::macro_value::{Macro, MacroData, Matcher, Rule};
|
||||
use crate::macros::mactree::map_mactree_v;
|
||||
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
|
||||
use crate::macros::macro_value::{Macro, MacroData, Rule};
|
||||
use crate::macros::mactree::MacTreeSeq;
|
||||
use crate::macros::rule::matcher::Matcher;
|
||||
use crate::{MacTok, MacTree};
|
||||
|
||||
pub(crate) fn mk_macro<B: ToExpr + Clone + 'static>(
|
||||
pub type Args = Vec<MacTree>;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MacroBodyArgCollector {
|
||||
argc: usize,
|
||||
args: Args,
|
||||
cb: Rc<dyn Fn(Args) -> LocalBoxFuture<'static, GExpr>>,
|
||||
}
|
||||
impl Atomic for MacroBodyArgCollector {
|
||||
type Data = ();
|
||||
type Variant = OwnedVariant;
|
||||
}
|
||||
impl OwnedAtom for MacroBodyArgCollector {
|
||||
type Refs = Never;
|
||||
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
|
||||
async fn call_ref(&self, arg: orchid_extension::expr::Expr) -> GExpr {
|
||||
eprintln!("This is an intermediary value. It should never be copied");
|
||||
self.clone().call(arg).await
|
||||
}
|
||||
async fn call(mut self, arg: orchid_extension::expr::Expr) -> GExpr {
|
||||
let atom = (TAtom::downcast(arg.handle()).await).unwrap_or_else(|_| {
|
||||
panic!("This is an intermediary value, the argument types are known in advance")
|
||||
});
|
||||
self.args.push(own(&atom).await);
|
||||
if self.argc == self.args.len() {
|
||||
(self.cb)(self.args).await.to_gen().await
|
||||
} else {
|
||||
self.to_gen().await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn body_name(name: &str, counter: usize) -> String { format!("({name})::{counter}") }
|
||||
|
||||
pub(crate) fn build_macro(
|
||||
prio: Option<u64>,
|
||||
own_kws: impl IntoIterator<Item = &'static str>,
|
||||
rules: impl IntoIterator<Item = (Vec<MacTree>, B)>,
|
||||
) -> Vec<GenMember> {
|
||||
let own_kws = own_kws.into_iter().collect_vec();
|
||||
let name = own_kws[0];
|
||||
let (patterns, bodies) = rules.into_iter().unzip::<_, _, Vec<Vec<MacTree>>, Vec<B>>();
|
||||
let main_const = lazy(true, name, async move |path, ctx| {
|
||||
let module = (Sym::new(path.split_last_seg().1.iter().cloned(), ctx.i()).await)
|
||||
.expect("Default macro in global root");
|
||||
MemKind::Const(
|
||||
Macro(Rc::new(MacroData {
|
||||
module,
|
||||
prio,
|
||||
rules: stream(async |mut h| {
|
||||
for (counter, pat) in patterns.into_iter().enumerate() {
|
||||
let mut placeholders = Vec::new();
|
||||
map_mactree_v(&pat, &mut false, &mut |tt| {
|
||||
if let MacTok::Ph(ph) = &*tt.tok {
|
||||
placeholders.push(ph.name.clone())
|
||||
}
|
||||
None
|
||||
});
|
||||
let pattern = match prio {
|
||||
Some(_) => Matcher::Priod(PriodMatcher::new(&pat, ctx.i()).await.unwrap()),
|
||||
None => Matcher::Named(NamedMatcher::new(&pat, ctx.i()).await.unwrap()),
|
||||
};
|
||||
h.emit(Rule {
|
||||
glossary: pat.iter().flat_map(|t| t.glossary()).cloned().collect(),
|
||||
pattern,
|
||||
placeholders,
|
||||
body_name: ctx.i().i(&format!("({name})::{counter}")).await,
|
||||
})
|
||||
.await;
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
) -> MacroBuilder {
|
||||
MacroBuilder {
|
||||
prio,
|
||||
own_kws: own_kws.into_iter().collect(),
|
||||
patterns: Vec::new(),
|
||||
body_consts: Vec::new(),
|
||||
}
|
||||
}
|
||||
pub(crate) struct MacroBuilder {
|
||||
prio: Option<u64>,
|
||||
own_kws: Vec<&'static str>,
|
||||
patterns: Vec<MacTreeSeq>,
|
||||
body_consts: Vec<GenMember>,
|
||||
}
|
||||
impl MacroBuilder {
|
||||
pub(crate) fn rule<const N: usize, R: ToExpr>(
|
||||
mut self,
|
||||
pat: MacTreeSeq,
|
||||
body: [impl AsyncFn([MacTree; N]) -> R + 'static; 1],
|
||||
) -> Self {
|
||||
let [body] = body;
|
||||
let body = Rc::new(body);
|
||||
let name = &body_name(self.own_kws[0], self.body_consts.len());
|
||||
self.body_consts.extend(match N {
|
||||
0 => lazy(true, name, async move |_| {
|
||||
let argv = [].into_iter().collect_array().expect("N is 0");
|
||||
MemKind::Const(body(argv).await.to_gen().await)
|
||||
}),
|
||||
1.. => cnst(true, name, MacroBodyArgCollector {
|
||||
argc: N,
|
||||
args: Vec::new(),
|
||||
cb: Rc::new(move |argv| {
|
||||
let arr = argv.into_iter().collect_array::<N>().expect("argc should enforce the length");
|
||||
let body = body.clone();
|
||||
Box::pin(async move { body(arr).await.to_gen().await })
|
||||
}),
|
||||
}),
|
||||
});
|
||||
self.patterns.push(pat);
|
||||
self
|
||||
}
|
||||
pub(crate) fn finish(self) -> Vec<GenMember> {
|
||||
let Self { own_kws, prio, patterns, body_consts } = self;
|
||||
let name = own_kws[0];
|
||||
let main_const = lazy(true, name, async move |path| {
|
||||
let module = (Sym::new(path.split_last_seg().1.iter().cloned(), &i()).await)
|
||||
.expect("Default macro in global root");
|
||||
MemKind::Const(
|
||||
Macro(Rc::new(MacroData {
|
||||
module,
|
||||
prio,
|
||||
rules: stream(async |mut h| {
|
||||
for (counter, pattern) in patterns.into_iter().enumerate() {
|
||||
let mut placeholders = Vec::new();
|
||||
pattern.map(&mut false, &mut |tt| {
|
||||
if let MacTok::Ph(ph) = &*tt.tok {
|
||||
placeholders.push(ph.name.clone())
|
||||
}
|
||||
None
|
||||
});
|
||||
h.emit(Rule {
|
||||
matcher: Matcher::new(pattern.clone()).await.unwrap(),
|
||||
pattern,
|
||||
placeholders,
|
||||
body_name: i().i(&format!("({name})::{counter}")).await,
|
||||
})
|
||||
.await;
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
.await,
|
||||
}))
|
||||
.to_gen()
|
||||
.await,
|
||||
}))
|
||||
.to_expr()
|
||||
.await,
|
||||
)
|
||||
});
|
||||
let kw_consts = own_kws[1..].iter().flat_map(|kw| {
|
||||
lazy(true, kw, async |path, ctx| {
|
||||
let name = VPath::new(path.split_last_seg().1.iter().cloned())
|
||||
.name_with_suffix(ctx.i().i(*kw).await)
|
||||
.to_sym(ctx.i())
|
||||
.await;
|
||||
MemKind::Const(sym_ref(name))
|
||||
})
|
||||
});
|
||||
let body_consts = (bodies.into_iter().enumerate())
|
||||
.flat_map(|(counter, body)| cnst(false, &format!("({name})::{counter}"), body));
|
||||
chain!(main_const, kw_consts, body_consts).collect()
|
||||
)
|
||||
});
|
||||
let kw_consts = own_kws[1..].iter().flat_map(|kw| {
|
||||
lazy(true, kw, async |path| {
|
||||
let main_const_name = VPath::new(path.split_last_seg().1.iter().cloned())
|
||||
.name_with_suffix(i().i(name).await)
|
||||
.to_sym(&i())
|
||||
.await;
|
||||
MemKind::Const(sym_ref(main_const_name))
|
||||
})
|
||||
});
|
||||
chain!(main_const, kw_consts, body_consts).collect()
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! mactree {
|
||||
($i:expr; $($body:tt)*) => {
|
||||
$crate::macros::utils::mactreev!($i; ($($body)*)).remove(0)
|
||||
($($body:tt)*) => {
|
||||
$crate::macros::utils::mactreev!(($($body)*)).items[0].clone()
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! mactreev {
|
||||
(@RECUR $i:expr; $ret:ident) => {};
|
||||
(@RECUR $i:expr; $ret:ident "..$" $name:ident $prio:literal $($tail:tt)*) => {
|
||||
ret.push(MacTok::Ph(Ph{
|
||||
name: i.i(stringify!($name)).await,
|
||||
kind: PhKind::Vector{ at_least_one: false, priority: $prio }
|
||||
}).at(Pos::Inherit));
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
macro_rules! mactreev_impl {
|
||||
(@RECUR $ret:ident) => {};
|
||||
(@RECUR $ret:ident "..$" $name:ident $prio:literal $($tail:tt)*) => {
|
||||
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
|
||||
name: orchid_extension::context::i().i(stringify!($name)).await,
|
||||
kind: $crate::macros::mactree::PhKind::Vector{ at_least_one: false, priority: $prio }
|
||||
}).at(orchid_base::location::Pos::Inherit));
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident "...$" $name:ident $prio:literal $($tail:tt)*) => {
|
||||
$ret.push(MacTok::Ph(Ph{
|
||||
name: $i.i(stringify!($name)).await,
|
||||
(@RECUR $ret:ident "...$" $name:ident $prio:literal $($tail:tt)*) => {
|
||||
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
|
||||
name: orchid_extension::context::i().i(stringify!($name)).await,
|
||||
kind: $crate::macros::mactree::PhKind::Vector{ at_least_one: true, priority: $prio }
|
||||
}).at(orchid_base::location::Pos::Inherit));
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident "$" $name:ident $($tail:tt)*) => {
|
||||
$ret.push(MacTok::Ph(Ph{
|
||||
name: $i.i(stringify!(name)).await,
|
||||
(@RECUR $ret:ident "$" $name:ident $($tail:tt)*) => {
|
||||
$ret.push($crate::macros::mactree::MacTok::Ph($crate::macros::mactree::Ph{
|
||||
name: orchid_extension::context::i().i(stringify!(name)).await,
|
||||
kind: $crate::macros::mactree::PhKind::Scalar
|
||||
}).at(orchid_base::location::Pos::Inherit));
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident "'" $arg:expr ; $($tail:tt)*) => {
|
||||
$ret.push(MacTok::Value($arg).at(orchid_base::location::Pos::Inherit));
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
(@RECUR $ret:ident "Val" $arg:expr ; $($tail:tt)*) => {
|
||||
$ret.push(
|
||||
$crate::macros::mactree::MacTok::Value($arg)
|
||||
.at(orchid_base::location::Pos::Inherit)
|
||||
);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident "" $arg:expr ; $($tail:tt)*) => {
|
||||
(@RECUR $ret:ident "push" $arg:expr ; $($tail:tt)*) => {
|
||||
$ret.push($arg);
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident "l_" $arg:expr ; ($($body:tt)*) $($tail:tt)*) => {
|
||||
(@RECUR $ret:ident "l_" $arg:expr ; ($($body:tt)*) $($tail:tt)*) => {
|
||||
$ret.push(MacTok::Lambda(
|
||||
MacTok::Value($arg).at(orchid_base::location::Pos::Inherit),
|
||||
mactreev!(i; $($body)*)
|
||||
).at(Pos::Inherit));
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident "l" $argh:tt $(:: $arg:tt)* ($($body:tt)*) $($tail:tt)*) => {
|
||||
$ret.push(MacTok::Lambda(
|
||||
MacTok::Name(sym!($argh $(:: $arg)*; $i).await).at(orchid_base::location::Pos::Inherit),
|
||||
mactreev!(i; $($body)*)
|
||||
MacTok::Name($arg).at(orchid_base::location::Pos::Inherit),
|
||||
$crate::macros::utils::mactreev!($($body)*)
|
||||
).at(orchid_base::location::Pos::Inherit));
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident ( $($body:tt)* ) $($tail:tt)*) => {
|
||||
(@RECUR $ret:ident "l" $argh:tt $(:: $arg:tt)+ ($($body:tt)*) $($tail:tt)*) => {
|
||||
$ret.push(MacTok::Lambda(
|
||||
MacTok::Name(sym!($argh $(:: $arg)+; orchid_extension::context::i()).await).at(orchid_base::location::Pos::Inherit),
|
||||
$crate::macros::utils::mactreev!($($body)*)
|
||||
).at(orchid_base::location::Pos::Inherit));
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $ret:ident $name:literal $($tail:tt)*) => {
|
||||
assert!(
|
||||
$name.contains("::"),
|
||||
"{} was treated as a name, but it doesn't have a namespace prefix",
|
||||
$name
|
||||
);
|
||||
let sym = orchid_base::name::Sym::parse(
|
||||
$name,
|
||||
&orchid_extension::context::i()
|
||||
).await.expect("Empty string in sym literal in Rust");
|
||||
$ret.push(
|
||||
MacTok::S(orchid_base::tree::Paren::Round, mactreev!($i; $($body)*))
|
||||
$crate::macros::mactree::MacTok::Name(sym)
|
||||
.at(orchid_base::location::Pos::Inherit)
|
||||
);
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident [ $($body:tt)* ] $($tail:tt)*) => {
|
||||
(@RECUR $ret:ident ( $($body:tt)* ) $($tail:tt)*) => {
|
||||
$ret.push(
|
||||
MacTok::S(orchid_base::tree::Paren::Square, mactreev!($i; $($body)*))
|
||||
$crate::macros::mactree::MacTok::S(
|
||||
orchid_base::tree::Paren::Round,
|
||||
$crate::macros::utils::mactreev!($($body)*)
|
||||
)
|
||||
.at(orchid_base::location::Pos::Inherit)
|
||||
);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $ret:ident [ $($body:tt)* ] $($tail:tt)*) => {
|
||||
$ret.push(
|
||||
$crate::macros::mactree::MacTok::S(
|
||||
orchid_base::tree::Paren::Square,
|
||||
$crate::macros::utils::mactreev!($($body)*)
|
||||
)
|
||||
.at(orchid_base::location::Pos::Inherit)
|
||||
);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $ret:ident { $($body:tt)* } $($tail:tt)*) => {
|
||||
$ret.push(
|
||||
$crate::macros::mactree::MacTok::S(
|
||||
orchid_base::tree::Paren::Curly,
|
||||
$crate::macros::utils::mactreev!($($body)*)
|
||||
)
|
||||
.at(orchid_base::location::Pos::Inherit)
|
||||
);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $ret:ident $ns:ident :: $nhead:tt $($tail:tt)*) => {
|
||||
$crate::macros::utils::mactreev_impl!(@NAME_MUNCHER $ret ($ns :: $nhead) $($tail)*)
|
||||
};
|
||||
(@NAME_MUNCHER $ret:ident ($($munched:tt)*) :: $name:tt $($tail:tt)*) => {
|
||||
$crate::macros::utils::mactreev_impl!(@NAME_MUNCHER $ret ($($munched)* :: $name) $($tail)*)
|
||||
};
|
||||
(@NAME_MUNCHER $ret:ident ($($munched:tt)*) $($tail:tt)*) => {
|
||||
let sym = orchid_base::sym!($($munched)* ; orchid_extension::context::i());
|
||||
$ret.push(
|
||||
$crate::macros::mactree::MacTok::Name(sym)
|
||||
.at(orchid_base::location::Pos::Inherit)
|
||||
);
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident { $($body:tt)* } $($tail:tt)*) => {
|
||||
$ret.push(
|
||||
MacTok::S(orchid_base::tree::Paren::Curly, mactreev!($i; $($body)*))
|
||||
.at(orchid_base::location::Pos::Inherit)
|
||||
);
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
};
|
||||
(@RECUR $i:expr; $ret:ident $nhead:tt $($tail:tt)*) => {
|
||||
mactreev!(@NAME_MUNCHER $i; $ret ($nhead) $($tail)*)
|
||||
};
|
||||
(@NAME_MUNCHER $i:expr; $ret:ident ($($munched:tt)*) :: $name:tt $($tail:tt)*) => {
|
||||
mactreev!(@NAME_MUNCHER $i; $ret ($($munched)* :: $name) $($tail)*)
|
||||
};
|
||||
(@NAME_MUNCHER $i:expr; $ret:ident ($($munched:tt)*) $($tail:tt)*) => {
|
||||
let sym = orchid_base::sym!($($munched)* ; $i).await;
|
||||
$ret.push(MacTok::Name(sym).at(orchid_base::location::Pos::Inherit));
|
||||
mactreev!(@RECUR $i; $ret $($tail)*);
|
||||
};
|
||||
($i:expr; ) => { Vec::new() };
|
||||
($i:expr; $($tail:tt)*) => {
|
||||
() => { Vec::new() };
|
||||
}
|
||||
macro_rules! mactreev {
|
||||
($($tail:tt)*) => {
|
||||
{
|
||||
let mut ret = Vec::<MacTree>::new();
|
||||
mactreev!(@RECUR $i; ret $($tail)*);
|
||||
ret
|
||||
let mut ret = Vec::<$crate::macros::mactree::MacTree>::new();
|
||||
ret.extend([]); // silence unneeded mut warning
|
||||
$crate::macros::utils::mactreev_impl!(@RECUR ret $($tail)*);
|
||||
$crate::macros::mactree::MacTreeSeq::new(ret)
|
||||
}
|
||||
};
|
||||
}
|
||||
pub(crate) use {mactree, mactreev};
|
||||
|
||||
pub(crate) use {mactree, mactreev, mactreev_impl};
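// Shorthand reference for the token forms accepted above (illustrative only):
//   ns::path             -> MacTok::Name
//   "$" x                -> scalar placeholder
//   "..$" x 0            -> vectorial placeholder, may be empty, priority 0
//   "...$" x 0           -> vectorial placeholder, at least one item
//   ( .. ) [ .. ] { .. } -> round / square / curly MacTok::S groups
//   "push" expr ;        -> splice an already-built MacTree
//   "Val" expr ;         -> wrap an expression as MacTok::Value
//   "l" a::b ( .. )      -> lambda binding the name a::b over the body
// e.g. mactreev!(pattern::match_rule ( std::option::of "...$" sub_pattern 0 ))
// builds the pattern used for std::option::of in std_macros.rs.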
|
||||
|
||||
@@ -1,4 +1,8 @@
pub mod number;
pub mod string;

pub mod option;
pub mod protocol;
pub mod record;
pub mod reflection;
pub mod std_system;
pub mod string;
pub mod tuple;

@@ -1,15 +1,20 @@
use orchid_api_derive::Coding;
use orchid_api_traits::Request;
use orchid_base::error::OrcRes;
use orchid_base::format::FmtUnit;
use orchid_base::name::Sym;
use orchid_base::number::Numeric;
use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, ToAtom, TAtom};
use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, Supports, TAtom, ToAtom};
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::context::i;
use orchid_extension::conv::TryFromExpr;
use orchid_extension::expr::Expr;
use orchid_extension::system::SysCtx;
use ordered_float::NotNan;
use rust_decimal::prelude::Zero;

use crate::std::protocol::types::GetTagIdMethod;
use crate::std::string::to_string::ToStringMethod;

#[derive(Clone, Debug, Coding)]
pub struct Int(pub i64);
impl Atomic for Int {
@@ -17,13 +22,23 @@ impl Atomic for Int {
type Data = Self;
}
impl ThinAtom for Int {
async fn print(&self, _: SysCtx) -> FmtUnit { self.0.to_string().into() }
async fn print(&self) -> FmtUnit { self.0.to_string().into() }
}
impl TryFromExpr for Int {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
TAtom::<Int>::try_from_expr(expr).await.map(|t| t.value)
}
}
impl Supports<GetTagIdMethod> for Int {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::parse("std::number::Int", &i()).await.unwrap().to_api()
}
}
impl Supports<ToStringMethod> for Int {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.to_string()
}
}

#[derive(Clone, Debug, Coding)]
pub struct Float(pub NotNan<f64>);
@@ -32,13 +47,18 @@ impl Atomic for Float {
type Data = Self;
}
impl ThinAtom for Float {
async fn print(&self, _: SysCtx) -> FmtUnit { self.0.to_string().into() }
async fn print(&self) -> FmtUnit { self.0.to_string().into() }
}
impl TryFromExpr for Float {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Ok(Self(Num::try_from_expr(expr).await?.0.to_f64()))
}
}
impl Supports<ToStringMethod> for Float {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.to_string()
}
}

pub struct Num(pub Numeric);
impl TryFromExpr for Num {

@@ -3,6 +3,7 @@ use std::ops::RangeInclusive;
use orchid_base::error::OrcRes;
use orchid_base::number::{num_to_errv, parse_num};
use orchid_extension::atom::ToAtom;
use orchid_extension::context::i;
use orchid_extension::lexer::{LexContext, Lexer};
use orchid_extension::tree::{GenTokTree, x_tok};

@@ -12,13 +13,13 @@ use super::num_atom::Num;
pub struct NumLexer;
impl Lexer for NumLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
async fn lex<'a>(all: &'a str, lxcx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
let fac = match parse_num(chars) {
Ok(numeric) => Num(numeric).to_atom_factory(),
Err(e) => return Err(num_to_errv(e, ctx.pos(all), ctx.src(), ctx.ctx.i()).await),
Err(e) => return Err(num_to_errv(e, lxcx.pos(all), lxcx.src(), &i()).await),
};
Ok((tail, x_tok(fac).await.at(ctx.pos_lt(chars.len(), tail))))
Ok((tail, x_tok(fac).await.at(lxcx.pos_lt(chars.len(), tail))))
}
}

@@ -6,28 +6,28 @@ use super::num_atom::{Float, HomoArray, Int, Num};

pub fn gen_num_lib() -> Vec<GenMember> {
prefix("std::number", [
fun(true, "add", |a: Num, b: Num| async move {
fun(true, "add", async |a: Num, b: Num| {
Num(match HomoArray::new([a.0, b.0]) {
HomoArray::Int([a, b]) => Numeric::Int(a + b),
HomoArray::Float([a, b]) => Numeric::Float(a + b),
})
}),
fun(true, "neg", |a: Num| async move {
fun(true, "neg", async |a: Num| {
Num(match a.0 {
Numeric::Int(i) => Numeric::Int(-i),
Numeric::Float(f) => Numeric::Float(-f),
})
}),
fun(true, "mul", |a: Num, b: Num| async move {
fun(true, "mul", async |a: Num, b: Num| {
Num(match HomoArray::new([a.0, b.0]) {
HomoArray::Int([a, b]) => Numeric::Int(a * b),
HomoArray::Float([a, b]) => Numeric::Float(a * b),
})
}),
fun(true, "idiv", |a: Int, b: Int| async move { Int(a.0 / b.0) }),
fun(true, "imod", |a: Int, b: Int| async move { Int(a.0 % b.0) }),
fun(true, "fdiv", |a: Float, b: Float| async move { Float(a.0 / b.0) }),
fun(true, "fmod", |a: Float, b: Float| async move {
fun(true, "idiv", async |a: Int, b: Int| Int(a.0 / b.0)),
fun(true, "imod", async |a: Int, b: Int| Int(a.0 % b.0)),
fun(true, "fdiv", async |a: Float, b: Float| Float(a.0 / b.0)),
fun(true, "fmod", async |a: Float, b: Float| {
Float(a.0 - NotNan::new((a.0 / b.0).trunc()).unwrap() * b.0)
}),
])

orchid-std/src/std/option.rs (Normal file, 75 lines)
@@ -0,0 +1,75 @@
use std::borrow::Cow;
use std::pin::Pin;

use futures::AsyncWrite;
use orchid_api_traits::Encode;
use orchid_base::error::mk_errv;
use orchid_base::sym;
use orchid_extension::atom::{Atomic, ForeignAtom, TAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::context::i;
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::expr::{Expr, ExprHandle};
use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::tree::{GenMember, cnst, fun, prefix};

use crate::{OrcString, api};

#[derive(Clone)]
pub struct OptAtom(Option<Expr>);
impl Atomic for OptAtom {
type Data = Option<api::ExprTicket>;
type Variant = OwnedVariant;
}
impl OwnedAtom for OptAtom {
type Refs = Vec<Expr>;
async fn val(&self) -> Cow<'_, Self::Data> {
Cow::Owned(self.0.as_ref().map(|ex| ex.handle().ticket()))
}
async fn deserialize(mut ctx: impl DeserializeCtx, refs: Self::Refs) -> Self {
Self(ctx.read::<bool>().await.then(|| refs.into_iter().next().unwrap()))
}
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.0.is_some().encode(write).await;
self.0.iter().cloned().collect()
}
}

pub struct OrcOpt<T>(pub Option<T>);
impl<T: TryFromExpr> TryFromExpr for OrcOpt<T> {
async fn try_from_expr(expr: Expr) -> orchid_base::error::OrcRes<Self> {
let atom = TAtom::<OptAtom>::try_from_expr(expr).await?;
match atom.value {
None => Ok(OrcOpt(None)),
Some(tk) => Ok(OrcOpt(Some(
T::try_from_expr(Expr::from_handle(ExprHandle::from_ticket(tk).await)).await?,
))),
}
}
}
impl<T: ToExpr + 'static> ToExpr for OrcOpt<T> {
async fn to_gen(self) -> orchid_extension::gen_expr::GExpr {
if let Some(val) = self.0 {
call(sym_ref(sym!(std::option::some; i())), [val.to_gen().await])
} else {
sym_ref(sym!(std::option::none; i()))
}
}
}
|
||||
pub fn gen_option_lib() -> Vec<GenMember> {
|
||||
prefix("std::option", [
|
||||
cnst(true, "none", OptAtom(None)),
|
||||
fun(true, "some", async |ex: Expr| OptAtom(Some(ex))),
|
||||
fun(true, "expect", async |opt: ForeignAtom, msg: OrcString| {
|
||||
match OrcOpt::try_from_expr(opt.clone().ex()).await? {
|
||||
OrcOpt(Some(ex)) => Ok::<Expr, _>(ex),
|
||||
OrcOpt(None) => Err(mk_errv(
|
||||
i().i("Unwrapped std::option::none").await,
|
||||
msg.get_string().await.as_str(),
|
||||
[opt.pos()],
|
||||
)),
|
||||
}
|
||||
}),
|
||||
])
|
||||
}
|
||||
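A hedged usage sketch of the `OrcOpt` wrapper defined above: since `OrcOpt<T>` implements `ToExpr` for any `T: ToExpr + 'static`, an extension function can return an optional value directly. The `first_char` entry below is hypothetical and assumes `StrAtom: ToExpr`, as its use as a return value elsewhere in this commit suggests.

```rust
// Illustrative only: returning an optional StrAtom through OrcOpt.
fun(true, "first_char", async |s: OrcString| {
  OrcOpt(s.get_string().await.chars().next().map(|c| StrAtom::new(Rc::new(c.to_string()))))
}),
```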
4
orchid-std/src/std/protocol/mod.rs
Normal file
@@ -0,0 +1,4 @@
pub mod parse_impls;
pub mod proto_parser;
pub mod type_parser;
pub mod types;
78
orchid-std/src/std/protocol/parse_impls.rs
Normal file
@@ -0,0 +1,78 @@
use itertools::{Itertools, chain};
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::interner::Tok;
use orchid_base::name::Sym;
use orchid_base::parse::{
Import, ParseCtx, Parsed, Snippet, expect_tok, line_items, parse_multiname, token_errv,
};
use orchid_base::tree::{Paren, Token};
use orchid_extension::parser::{
PTokTree, ParsCtx, ParsedLine, ParsedLineKind, p_tree2gen, p_v2gen,
};

pub async fn parse_impls(
ctx: &ParsCtx<'_>,
lines: &mut Vec<ParsedLine>,
impls: &mut Vec<(Sym, Tok<String>)>,
body_tt: &PTokTree,
) -> OrcRes<()> {
let i = ctx.i().clone();
let body = match &body_tt.tok {
Token::S(Paren::Round, body) => line_items(ctx, Snippet::new(body_tt, body)).await,
Token::S(ptyp, _) =>
return Err(mk_errv(
i.i("Incorrect paren type").await,
format!("Expected () block, found {ptyp}"),
[body_tt.sr().pos()],
)),
_ =>
return Err(
token_errv(ctx, body_tt, "Expected body", |s| {
format!("Expected (impl ...) block, found {s}")
})
.await,
),
};
for Parsed { tail: line, output: comments } in body {
if let Ok(Parsed { tail, .. }) = expect_tok(ctx, line, i.i("impl").await).await {
let Parsed { tail, output: name_tt } = parse_multiname(ctx, tail).await?;
let (name, name_sr) = match name_tt.into_iter().at_most_one() {
Ok(None) => panic!("multiname is always at least one name"),
Ok(Some(ref n @ Import { name: Some(_), ref sr, .. })) =>
(n.clone().mspath().to_sym(&i).await, sr.clone()),
Ok(Some(Import { name: None, sr, .. })) =>
return Err(mk_errv(
i.i("impl line with globstar").await,
"::* is not permitted in a protocol impl",
[sr.pos()],
)),
Err(e) =>
return Err(mk_errv(
i.i("Impl line with multiple protocol names").await,
"::() is not permitted in a protocol impl",
e.map(|i| i.sr.pos()),
)),
};
let Parsed { tail, .. } = expect_tok(ctx, tail, i.i("as").await).await?;
let cnst_name = i.i(&format!("{}{}", lines.len(), name.iter().join("__"))).await;
lines.push(ParsedLine {
comments,
sr: line.sr(),
kind: ParsedLineKind::Rec(Vec::from_iter(chain![
[Token::Name(i.i("let").await).at(line.sr())],
[Token::Name(cnst_name.clone()).at(name_sr)],
[Token::Name(i.i("=").await).at(line.sr())],
tail.iter().cloned().map(p_tree2gen),
])),
});
impls.push((name, cnst_name));
} else {
lines.push(ParsedLine {
sr: line.sr(),
comments,
kind: ParsedLineKind::Rec(p_v2gen(line.to_vec())),
});
}
}
Ok(())
}
77
orchid-std/src/std/protocol/proto_parser.rs
Normal file
@@ -0,0 +1,77 @@
use std::rc::Rc;

use hashbrown::HashMap;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::parse::{Comment, Parsed, expect_end, try_pop_no_fluff};
use orchid_base::sym;
use orchid_base::tree::Token;
use orchid_extension::context::i;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};

use crate::std::protocol::parse_impls::parse_impls;
use crate::std::protocol::types::Tag;

#[derive(Default)]
pub struct AsProtoParser;
impl Parser for AsProtoParser {
const LINE_HEAD: &'static str = "as_proto";
async fn parse<'a>(
pcx: ParsCtx<'a>,
exported: bool,
cmts: Vec<Comment>,
line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: body_tt, tail } = try_pop_no_fluff(&pcx, line).await?;
expect_end(&pcx, tail).await?;
if exported {
return Err(mk_errv(
i().i("Exported internal line").await,
"as_proto cannot be exported, the type shares the enclosing module's visibility",
[line.sr().pos()],
));
}
let mut lines = Vec::new();
let mut impls = Vec::new();
parse_impls(&pcx, &mut lines, &mut impls, body_tt).await?;
let id = pcx.module();
let proto_tag_name = i().i("__protocol_tag__").await;
let proto_tag_path = id.suffix([proto_tag_name.clone()], &i()).await;
lines.push(ParsedLine::cnst(&line.sr(), &cmts, true, proto_tag_name, async |_ccx| {
exec(async move |mut h| {
let mut new_impls = HashMap::new();
for (k, v) in impls {
new_impls.insert(k.clone(), h.register(sym_ref(id.suffix([v], &i()).await)).await);
}
Tag { id, impls: Rc::new(new_impls) }
})
.await
}));
lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("resolve").await, async move |_| {
call(sym_ref(sym!(std::protocol::resolve; i())), [sym_ref(proto_tag_path)])
}));
Ok(lines)
}
}

#[derive(Default)]
pub struct ProtoParser;
impl Parser for ProtoParser {
const LINE_HEAD: &'static str = "proto";
async fn parse<'a>(
ctx: ParsCtx<'a>,
exported: bool,
cmts: Vec<Comment>,
line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: name_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
let Token::Name(name) = &name_tt.tok else {
return Err(mk_errv(i().i("missing name for type").await, "A type needs a name", [name_tt
.sr()
.pos()]));
};
let lines = AsProtoParser::parse(ctx, false, cmts.clone(), tail).await?;
Ok(vec![ParsedLine::module(&line.sr(), &cmts, exported, name, true, lines)])
}
}
82
orchid-std/src/std/protocol/type_parser.rs
Normal file
@@ -0,0 +1,82 @@
use std::rc::Rc;

use hashbrown::HashMap;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::parse::{Comment, Parsed, expect_end, try_pop_no_fluff};
use orchid_base::sym;
use orchid_base::tree::Token;
use orchid_extension::context::i;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};

use crate::std::protocol::parse_impls::parse_impls;
use crate::std::protocol::types::Tag;

#[derive(Default)]
pub struct AsTypeParser;
impl Parser for AsTypeParser {
const LINE_HEAD: &'static str = "as_type";
async fn parse<'a>(
ctx: ParsCtx<'a>,
exported: bool,
cmts: Vec<Comment>,
line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: body_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
expect_end(&ctx, tail).await?;
if exported {
return Err(mk_errv(
i().i("Exported internal line").await,
"as_type cannot be exported, the type shares the enclosing module's visibility",
[line.sr().pos()],
));
}
let mut lines = Vec::new();
let mut impls = Vec::new();
parse_impls(&ctx, &mut lines, &mut impls, body_tt).await?;
let id = ctx.module();
let type_tag_name = i().i("__type_tag__").await;
let type_tag_path = id.suffix([type_tag_name.clone()], &i()).await;
lines.push(ParsedLine::cnst(&line.sr(), &cmts, true, type_tag_name, async |_ccx| {
exec(async move |mut h| {
let mut new_impls = HashMap::new();
for (k, v) in impls {
new_impls.insert(k.clone(), h.register(sym_ref(id.suffix([v], &i()).await)).await);
}
Tag { id, impls: Rc::new(new_impls) }
})
.await
}));
let type_tag_path_1 = type_tag_path.clone();
lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("wrap").await, async move |_ccx| {
call(sym_ref(sym!(std::protocol::wrap; i())), [sym_ref(type_tag_path_1)])
}));
let type_tag_path_1 = type_tag_path.clone();
lines.push(ParsedLine::cnst(&line.sr(), [], false, i().i("unwrap").await, async move |_ccx| {
call(sym_ref(sym!(std::protocol::unwrap; i())), [sym_ref(type_tag_path_1)])
}));
Ok(lines)
}
}

#[derive(Default)]
pub struct TypeParser;
impl Parser for TypeParser {
const LINE_HEAD: &'static str = "type";
async fn parse<'a>(
ctx: ParsCtx<'a>,
exported: bool,
cmts: Vec<Comment>,
line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: name_tt, tail } = try_pop_no_fluff(&ctx, line).await?;
let Token::Name(name) = &name_tt.tok else {
return Err(mk_errv(i().i("missing name for type").await, "A type needs a name", [name_tt
.sr()
.pos()]));
};
let lines = AsTypeParser::parse(ctx, false, cmts.clone(), tail).await?;
Ok(vec![ParsedLine::module(&line.sr(), &cmts, exported, name, true, lines)])
}
}
141
orchid-std/src/std/protocol/types.rs
Normal file
@@ -0,0 +1,141 @@
use std::borrow::Cow;
use std::rc::Rc;

use hashbrown::HashMap;
use never::Never;
use orchid_api_derive::Coding;
use orchid_api_traits::Request;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::fmt;
use orchid_base::name::Sym;
use orchid_extension::atom::{AtomMethod, Atomic, ForeignAtom, MethodSetBuilder, Supports, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::call;
use orchid_extension::tree::{GenMember, fun, prefix};

use crate::api;

#[derive(Clone, Debug)]
pub struct Tag {
pub id: Sym,
pub impls: Rc<HashMap<Sym, Expr>>,
}
impl Atomic for Tag {
type Data = api::TStrv;
type Variant = OwnedVariant;
fn reg_reqs() -> MethodSetBuilder<Self> { MethodSetBuilder::new().handle::<GetImplMethod>() }
}
impl OwnedAtom for Tag {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.id.to_api()) }
}
impl Supports<GetImplMethod> for Tag {
async fn handle(&self, req: GetImplMethod) -> <GetImplMethod as Request>::Response {
self.impls.get(&Sym::from_api(req.0, &i()).await).map(|expr| expr.handle().ticket())
}
}
#[derive(Clone, Debug, Coding)]
pub struct GetImplMethod(pub api::TStrv);
impl Request for GetImplMethod {
type Response = Option<api::ExprTicket>;
}
impl AtomMethod for GetImplMethod {
const NAME: &str = "std::protocol::get_impl";
}
#[derive(Clone, Debug, Coding)]
pub struct GetTagIdMethod;
impl Request for GetTagIdMethod {
type Response = api::TStrv;
}
impl AtomMethod for GetTagIdMethod {
const NAME: &str = "std::protocol::get_tag_id";
}

#[derive(Clone, Debug)]
pub struct Tagged {
pub tag: Tag,
pub value: Expr,
}
impl Atomic for Tagged {
type Data = api::TStrv;
type Variant = OwnedVariant;
}
impl OwnedAtom for Tagged {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.tag.id.to_api()) }
}
impl Supports<GetImplMethod> for Tagged {
async fn handle(&self, req: GetImplMethod) -> <GetImplMethod as Request>::Response {
self.tag.handle(req).await
}
}

pub async fn get_impl(receiver: ForeignAtom, proto: ForeignAtom) -> OrcRes<Expr> {
let Some(proto_id) = proto.request(GetTagIdMethod).await else {
return Err(mk_errv(i().i("Not a protocol").await, "Protocol does not have a tag ID", [
proto.pos()
]));
};
let Some(impl_val_opt) = receiver.request(GetImplMethod(proto_id)).await else {
return Err(mk_errv(
i().i("Receiver not tagged").await,
"The receiver does not have a type tag",
[receiver.pos()],
));
};
if let Some(impl_val) = impl_val_opt {
return Ok(Expr::deserialize(impl_val).await);
}
let Some(type_id) = receiver.request(GetTagIdMethod).await else {
return Err(mk_errv(
i().i("Incorrect protocols implementation in extension").await,
"Atom provides an impl table but no tag ID",
[receiver.pos()],
));
};
let Some(impl_val_opt) = proto.request(GetImplMethod(type_id)).await else {
return Err(mk_errv(
i().i("Incorrect protocols implementation in extension").await,
"Proto table atom provides a tag ID but no impl table",
[receiver.pos()],
));
};
if let Some(impl_val) = impl_val_opt {
return Ok(Expr::deserialize(impl_val).await);
}
return Err(mk_errv(
i().i("Implementation not found").await,
"This protocol is not implemented for this receiver",
[receiver.pos(), proto.pos()],
));
}

pub fn gen_protocol_lib() -> Vec<GenMember> {
prefix("std::protocol", [
fun(false, "resolve", async |tag: ForeignAtom, value: ForeignAtom| {
Ok(call(get_impl(value.clone(), tag).await?.to_gen().await, [value.to_gen().await]))
}),
fun(false, "wrap", async |tag: TAtom<Tag>, value: Expr| Tagged { tag: own(&tag).await, value }),
fun(false, "unwrap", async |tag: TAtom<Tag>, value: TAtom<Tagged>| {
let own_tag = own(&tag).await;
let own_val = own(&value).await;
if own_val.tag.id == own_tag.id {
Ok(own_val.value.to_gen().await)
} else {
Err(mk_errv(
i().i("Type mismatch").await,
format!(
"{} has type {}, expected {}",
fmt(&value, &i()).await,
own_val.tag.id,
own_tag.id
),
[value.pos()],
))
}
}),
])
}
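Reading `get_impl` above: resolution first asks the receiver for an impl keyed by the protocol's tag ID, and only if that table is empty asks the protocol for an impl keyed by the receiver's tag ID, erroring when neither side knows the pairing. A hedged sketch built on the same public pieces (the helper name is hypothetical, and `GExpr` would need importing from `orchid_extension::gen_expr`):

```rust
// Sketch, not part of the commit: dispatch = look up the impl, then call it
// with the receiver, which is exactly what std::protocol::resolve does above.
async fn dispatch(receiver: ForeignAtom, proto: ForeignAtom) -> OrcRes<GExpr> {
  let impl_expr = get_impl(receiver.clone(), proto).await?;
  Ok(call(impl_expr.to_gen().await, [receiver.to_gen().await]))
}
```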
2
orchid-std/src/std/record/mod.rs
Normal file
@@ -0,0 +1,2 @@
pub mod record_atom;
pub mod record_lib;
39
orchid-std/src/std/record/record_atom.rs
Normal file
@@ -0,0 +1,39 @@
use std::borrow::Cow;
use std::pin::Pin;
use std::rc::Rc;

use futures::AsyncWrite;
use futures::future::join_all;
use hashbrown::HashMap;
use orchid_api_traits::Encode;
use orchid_base::interner::Tok;
use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::context::i;
use orchid_extension::expr::Expr;

use crate::api;

#[derive(Clone)]
pub struct Record(pub Rc<HashMap<Tok<String>, Expr>>);
impl Atomic for Record {
type Data = ();
type Variant = OwnedVariant;
}
impl OwnedAtom for Record {
type Refs = Vec<Expr>;
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
let (keys, values) =
self.0.iter().map(|(k, v)| (k.to_api(), v.clone())).unzip::<_, _, Vec<_>, Vec<_>>();
keys.encode(write).await;
values
}
async fn deserialize(mut dctx: impl DeserializeCtx, refs: Self::Refs) -> Self {
let keys =
join_all(dctx.decode::<Vec<api::TStr>>().await.iter().map(|t| async { i().ex(*t).await }))
.await;
Record(Rc::new(keys.into_iter().zip(refs).collect()))
}

async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
}
30
orchid-std/src/std/record/record_lib.rs
Normal file
@@ -0,0 +1,30 @@
use std::rc::Rc;

use hashbrown::HashMap;
use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own;
use orchid_extension::expr::Expr;
use orchid_extension::tree::{GenMember, cnst, fun, prefix};

use crate::std::option::OrcOpt;
use crate::std::record::record_atom::Record;
use crate::std::string::str_atom::IntStrAtom;

pub fn gen_record_lib() -> Vec<GenMember> {
prefix("std::record", [
cnst(true, "empty", Record(Rc::new(HashMap::new()))),
fun(true, "set", async |map: TAtom<Record>, key: IntStrAtom, val: Expr| {
let mut map = own(&map).await.0.as_ref().clone();
map.insert(key.0.clone(), val);
Record(Rc::new(map))
}),
fun(true, "get", async |map: TAtom<Record>, key: IntStrAtom| {
OrcOpt(own(&map).await.0.get(&key.0).cloned())
}),
fun(true, "delete", async |map: TAtom<Record>, key: IntStrAtom| {
let mut map = own(&map).await.0.as_ref().clone();
map.remove(&key.0);
Record(Rc::new(map))
}),
])
}
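The record is persistent: `set` and `delete` clone the backing `HashMap` and return a fresh `Record`, leaving the original atom untouched. A hedged sketch of one more entry in the same style; `size` is hypothetical, assumes `Int` is brought into scope, and assumes the record length fits in the integer type behind `Int`.

```rust
// Illustrative only, following the pattern of "get"/"set" above:
fun(true, "size", async |map: TAtom<Record>| {
  Int(own(&map).await.0.len().try_into().expect("record size fits in Int"))
}),
```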
1
orchid-std/src/std/reflection/mod.rs
Normal file
@@ -0,0 +1 @@
pub mod sym_atom;
67
orchid-std/src/std/reflection/sym_atom.rs
Normal file
@@ -0,0 +1,67 @@
use std::borrow::Cow;

use orchid_api::TStrv;
use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use orchid_base::error::mk_errv;
use orchid_base::name::{NameLike, Sym};
use orchid_extension::atom::{Atomic, Supports, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::expr::{Expr, ExprHandle};
use orchid_extension::system::dep_req;
use orchid_extension::tree::{GenMember, fun, prefix};

use crate::std::std_system::StdReq;
use crate::std::string::str_atom::IntStrAtom;
use crate::std::string::to_string::ToStringMethod;
use crate::{HomoTpl, StdSystem, api};

#[derive(Clone, Coding)]
pub struct SymAtomData(pub api::TStrv);
#[derive(Clone)]
pub struct SymAtom(pub(crate) Sym);
impl Atomic for SymAtom {
type Data = SymAtomData;
type Variant = OwnedVariant;
}
impl OwnedAtom for SymAtom {
type Refs = ();
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(SymAtomData(self.0.tok().to_api())) }
}
impl Supports<ToStringMethod> for SymAtom {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.to_string()
}
}

#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(StdReq)]
pub struct CreateSymAtom(pub TStrv);
impl Request for CreateSymAtom {
type Response = api::ExprTicket;
}

pub async fn sym_expr(sym: Sym) -> Expr {
Expr::from_handle(ExprHandle::deserialize(
dep_req::<StdSystem, _>(CreateSymAtom(sym.to_api())).await,
))
}

pub async fn gen_sym_lib() -> Vec<GenMember> {
prefix("std::refl::sym", [
fun(true, "from_str", async move |str: TAtom<IntStrAtom>| {
match Sym::parse(&i().ex(*str).await, &i()).await {
Ok(sym) => Ok(SymAtom(sym)),
Err(_) => Err(mk_errv(
i().i("Cannot parse sym from empty string").await,
"Empty string passed to std::refl::sym::from_str",
[str.pos()],
)),
}
}),
fun(true, "to_tpl", async move |sym: TAtom<SymAtom>| {
HomoTpl(own(&sym).await.0.segs().map(IntStrAtom).collect())
}),
])
}
@@ -1,10 +1,15 @@
use never::Never;
use orchid_base::interner::Interner;
use std::rc::Rc;

use futures::future::join_all;
use orchid_api_derive::{Coding, Hierarchy};
use orchid_base::name::Sym;
use orchid_base::reqnot::Receipt;
use orchid_base::sym;
use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::entrypoint::ExtReq;
use orchid_extension::expr::Expr;
use orchid_extension::lexer::LexerObj;
use orchid_extension::parser::ParserObj;
use orchid_extension::system::{System, SystemCard};
@@ -15,9 +20,25 @@ use super::number::num_lib::gen_num_lib;
use super::string::str_atom::{IntStrAtom, StrAtom};
use super::string::str_lib::gen_str_lib;
use crate::std::number::num_lexer::NumLexer;
use crate::std::option::{OptAtom, gen_option_lib};
use crate::std::protocol::proto_parser::{AsProtoParser, ProtoParser};
use crate::std::protocol::type_parser::{AsTypeParser, TypeParser};
use crate::std::protocol::types::{Tag, Tagged, gen_protocol_lib};
use crate::std::record::record_atom::Record;
use crate::std::record::record_lib::gen_record_lib;
use crate::std::reflection::sym_atom::{CreateSymAtom, SymAtom, gen_sym_lib};
use crate::std::string::str_lexer::StringLexer;
use crate::std::string::to_string::AsStrTag;
use crate::std::tuple::{CreateTuple, Tuple, TupleBuilder, gen_tuple_lib};
use crate::{Float, Int};

#[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable]
pub enum StdReq {
CreateTuple(CreateTuple),
CreateSymAtom(CreateSymAtom),
}

#[derive(Default)]
pub struct StdSystem;
impl SystemCtor for StdSystem {
@@ -29,15 +50,51 @@ impl SystemCtor for StdSystem {
}
impl SystemCard for StdSystem {
type Ctor = Self;
type Req = Never;
type Req = StdReq;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> {
[Some(Int::dynfo()), Some(Float::dynfo()), Some(StrAtom::dynfo()), Some(IntStrAtom::dynfo())]
[
Some(Int::dynfo()),
Some(Float::dynfo()),
Some(StrAtom::dynfo()),
Some(IntStrAtom::dynfo()),
Some(OptAtom::dynfo()),
Some(Record::dynfo()),
Some(Tuple::dynfo()),
Some(TupleBuilder::dynfo()),
Some(Tag::dynfo()),
Some(Tagged::dynfo()),
Some(AsStrTag::dynfo()),
]
}
}
impl System for StdSystem {
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} }
async fn request(xreq: ExtReq<'_>, req: Self::Req) -> Receipt<'_> {
match req {
StdReq::CreateTuple(ref req @ CreateTuple(ref items)) => {
let tpl = Tuple(Rc::new(join_all(items.iter().copied().map(Expr::deserialize)).await));
let tk = tpl.to_expr().await.serialize().await;
xreq.handle(req, &tk).await
},
StdReq::CreateSymAtom(ref req @ CreateSymAtom(sym_tok)) => {
let sym_atom = SymAtom(Sym::from_api(sym_tok, &i()).await);
xreq.handle(req, &sym_atom.to_expr().await.serialize().await).await
},
}
}
fn lexers() -> Vec<LexerObj> { vec![&StringLexer, &NumLexer] }
fn parsers() -> Vec<ParserObj> { vec![] }
fn env() -> Vec<GenMember> { merge_trivial([gen_num_lib(), gen_str_lib()]) }
async fn prelude(i: &Interner) -> Vec<Sym> { vec![sym!(std; i).await] }
fn parsers() -> Vec<ParserObj> { vec![&AsTypeParser, &TypeParser, &AsProtoParser, &ProtoParser] }
async fn env() -> Vec<GenMember> {
merge_trivial([
gen_num_lib(),
gen_str_lib(),
gen_option_lib(),
gen_record_lib(),
gen_tuple_lib(),
gen_protocol_lib(),
gen_sym_lib().await,
])
}
async fn prelude() -> Vec<Sym> {
vec![sym!(std; i()), sym!(std::tuple; i()), sym!(std::option; i())]
}
}
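The request plumbing above generalises: a new host-side request is a `Coding` struct marked `#[extends(StdReq)]`, a matching variant in the `StdReq` enum, and one more arm in `System::request`. A hedged sketch only; `CreatePair` is hypothetical and not part of this commit.

```rust
// Not part of this commit; mirrors CreateTuple/CreateSymAtom above.
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(StdReq)]
pub struct CreatePair(pub api::ExprTicket, pub api::ExprTicket);
impl Request for CreatePair {
  type Response = api::ExprTicket;
}
// StdReq would gain a CreatePair(CreatePair) variant, and request() an arm
// that deserializes both tickets and answers with a serialized result expression.
```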
@@ -1,3 +1,4 @@
pub mod str_atom;
pub mod str_lexer;
pub mod str_lib;
pub mod to_string;
@@ -11,9 +11,11 @@ use orchid_base::format::{FmtCtx, FmtUnit};
use orchid_base::interner::Tok;
use orchid_extension::atom::{AtomMethod, Atomic, MethodSetBuilder, Supports, TAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::context::i;
use orchid_extension::conv::TryFromExpr;
use orchid_extension::expr::Expr;
use orchid_extension::system::SysCtx;

use crate::std::string::to_string::ToStringMethod;

#[derive(Copy, Clone, Debug, Coding)]
pub struct StringGetVal;
@@ -24,8 +26,11 @@ impl AtomMethod for StringGetVal {
const NAME: &str = "std::string_get_val";
}
impl Supports<StringGetVal> for StrAtom {
async fn handle(&self, _: SysCtx, _: StringGetVal) -> <StringGetVal as Request>::Response {
self.0.clone()
async fn handle(&self, _: StringGetVal) -> <StringGetVal as Request>::Response { self.0.clone() }
}
impl Supports<ToStringMethod> for StrAtom {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.as_str().to_string()
}
}

@@ -46,7 +51,7 @@ impl Deref for StrAtom {
impl OwnedAtom for StrAtom {
type Refs = ();
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn serialize(&self, _: SysCtx, sink: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
async fn serialize(&self, sink: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.deref().encode(sink).await
}
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
@@ -58,7 +63,7 @@ impl OwnedAtom for StrAtom {
}

#[derive(Debug, Clone)]
pub struct IntStrAtom(Tok<String>);
pub struct IntStrAtom(pub(crate) Tok<String>);
impl Atomic for IntStrAtom {
type Variant = OwnedVariant;
type Data = orchid_api::TStr;
@@ -72,19 +77,28 @@ impl OwnedAtom for IntStrAtom {
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
format!("{:?}i", *self.0).into()
}
async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl AsyncWrite + ?Sized)>) {
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) {
self.0.encode(write).await
}
async fn deserialize(mut ctx: impl DeserializeCtx, _: ()) -> Self {
let s = ctx.decode::<String>().await;
Self(ctx.sys().i().i(&s).await)
async fn deserialize(mut dctx: impl DeserializeCtx, _: ()) -> Self {
let s = dctx.decode::<String>().await;
Self(i().i(&s).await)
}
}
impl TryFromExpr for IntStrAtom {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
Ok(IntStrAtom(i().ex(TAtom::<IntStrAtom>::try_from_expr(expr).await?.value).await))
}
}
impl Supports<ToStringMethod> for IntStrAtom {
async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
self.0.as_str().to_string()
}
}

#[derive(Clone)]
pub struct OrcString {
kind: OrcStringKind,
ctx: SysCtx,
}

#[derive(Clone)]
@@ -95,7 +109,7 @@ pub enum OrcStringKind {
impl OrcString {
pub async fn get_string(&self) -> Rc<String> {
match &self.kind {
OrcStringKind::Int(tok) => self.ctx.i().ex(**tok).await.rc(),
OrcStringKind::Int(tok) => i().ex(**tok).await.rc(),
OrcStringKind::Val(atom) => atom.request(StringGetVal).await,
}
}
@@ -104,12 +118,11 @@ impl OrcString {
impl TryFromExpr for OrcString {
async fn try_from_expr(expr: Expr) -> OrcRes<OrcString> {
if let Ok(v) = TAtom::<StrAtom>::try_from_expr(expr.clone()).await {
return Ok(OrcString { ctx: expr.ctx(), kind: OrcStringKind::Val(v) });
return Ok(OrcString { kind: OrcStringKind::Val(v) });
}
let ctx = expr.ctx();
match TAtom::<IntStrAtom>::try_from_expr(expr).await {
Ok(t) => Ok(OrcString { ctx: t.untyped.ctx().clone(), kind: OrcStringKind::Int(t) }),
Err(e) => Err(mk_errv(ctx.i().i("A string was expected").await, "", e.pos_iter())),
Ok(t) => Ok(OrcString { kind: OrcStringKind::Int(t) }),
Err(e) => Err(mk_errv(i().i("A string was expected").await, "", e.pos_iter())),
}
}
}
@@ -5,10 +5,12 @@ use orchid_base::location::SrcRange;
use orchid_base::name::Sym;
use orchid_base::parse::ParseCtx;
use orchid_base::sym;
use orchid_base::tree::wrap_tokv;
use orchid_base::tree::{Paren, wrap_tokv};
use orchid_extension::context::i;
use orchid_extension::gen_expr::sym_ref;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::parser::p_tree2gen;
use orchid_extension::tree::{GenTokTree, ref_tok, x_tok};
use orchid_extension::tree::{GenTok, GenTokTree, ref_tok, x_tok};

use super::str_atom::IntStrAtom;

@@ -97,9 +99,9 @@ fn parse_string(str: &str) -> Result<String, StringError> {
pub struct StringLexer;
impl Lexer for StringLexer {
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"', '`'..='`'];
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
async fn lex<'a>(all: &'a str, lctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let Some(mut tail) = all.strip_prefix('"') else {
return Err(err_not_applicable(ctx.ctx.i()).await);
return Err(err_not_applicable().await);
};
let mut ret = None;
let mut cur = String::new();
@@ -121,19 +123,27 @@ impl Lexer for StringLexer {
}
let add_frag = |prev: Option<GenTokTree>, new: GenTokTree| async {
let Some(prev) = prev else { return new };
let concat_fn = ref_tok(sym!(std::string::concat; ctx.i()).await)
let concat_fn = ref_tok(sym!(std::string::concat; lctx.i()))
.await
.at(SrcRange::zw(prev.sr.path(), prev.sr.start()));
wrap_tokv([concat_fn, prev, new])
};
loop {
if let Some(rest) = tail.strip_prefix('"') {
return Ok((rest, add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, ctx).await).await));
return Ok((
rest,
add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, lctx).await).await,
));
} else if let Some(rest) = tail.strip_prefix('$') {
ret = Some(add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, ctx).await).await);
let (new_tail, tree) = ctx.recurse(rest).await?;
ret = Some(add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, lctx).await).await);
let (new_tail, tree) = lctx.recurse(rest).await?;
tail = new_tail;
ret = Some(add_frag(ret, p_tree2gen(tree)).await);
// wrap the received token in a call to to_str
let to_str = sym_ref(sym!(std::string::to_str; i()));
let sr = tree.sr();
let inj_to_str_tok = GenTok::NewExpr(to_str).at(sr.map_range(|_| sr.start()..sr.start()));
let to_str_call = GenTok::S(Paren::Round, vec![inj_to_str_tok, p_tree2gen(tree)]).at(sr);
ret = Some(add_frag(ret, to_str_call).await);
} else if tail.starts_with('\\') {
// parse_string will deal with it, we just have to skip the next char
tail = &tail[2..];
@@ -143,11 +153,11 @@ impl Lexer for StringLexer {
cur.push(c);
tail = ch.as_str();
} else {
let range = ctx.pos(all)..ctx.pos("");
let range = lctx.pos(all)..lctx.pos("");
return Err(mk_errv(
ctx.i().i("No string end").await,
lctx.i().i("No string end").await,
"String never terminated with \"",
[SrcRange::new(range.clone(), ctx.src())],
[SrcRange::new(range.clone(), lctx.src())],
));
}
}
@@ -1,15 +1,60 @@
use std::rc::Rc;

use orchid_extension::tree::{GenMember, comments, fun, prefix};
use orchid_base::format::fmt;
use orchid_base::sym;
use orchid_extension::atom::ForeignAtom;
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::tree::{GenMember, cnst, comments, fun, prefix};

use super::str_atom::StrAtom;
use crate::OrcString;
use crate::std::protocol::types::get_impl;
use crate::std::string::to_string::{AsStrTag, ToStringMethod};

pub fn gen_str_lib() -> Vec<GenMember> {
prefix("std::string", [comments(
["Concatenate two strings"],
fun(true, "concat", |left: OrcString, right: OrcString| async move {
StrAtom::new(Rc::new(left.get_string().await.to_string() + &right.get_string().await))
}),
)])
prefix("std::string", [
comments(
["Concatenate two strings"],
fun(true, "concat", async |left: OrcString, right: OrcString| {
StrAtom::new(Rc::new(left.get_string().await.to_string() + &right.get_string().await))
}),
),
comments(
["Converts a value to string. This function is used in interpolation. \
It supports the std::string::to_string protocol in Orchid, \
the std::string::to_string request in Rust, \
and expression debug printing as a fallback (print_atom for Atomic implementors in Rust).\n\n\
This function is infallible."],
fun(true, "to_str", async |input: Expr| {
exec(async move |mut h| {
if let Ok(atom) = h.exec::<ForeignAtom>(input.clone()).await {
if let Some(str) = atom.request(ToStringMethod).await {
return StrAtom::new(Rc::new(str)).to_gen().await;
}
let proto_ref = sym_ref(sym!(std::string::to_string::__protocol_tag__; i()));
let proto = h.exec(proto_ref).await.expect("This protocol is defined in this system");
if let Ok(cb) = get_impl(atom.clone(), proto).await {
return call(cb.to_gen().await, [atom.to_gen().await]).to_gen().await;
}
}
return StrAtom::new(Rc::new(fmt(&input, &i()).await)).to_gen().await;
})
.await
}),
),
prefix("to_string", [
cnst(true, "__type_tag__", AsStrTag),
fun(true, "resolve", async |atom: ForeignAtom| {
exec(async |mut h| {
let proto = h.exec(sym_ref(sym!(std::string::to_string; i()))).await?;
Ok(call(get_impl(atom.clone(), proto).await?.to_gen().await, [atom.to_gen().await]))
})
.await
}),
]),
])
}
36
orchid-std/src/std/string/to_string.rs
Normal file
@@ -0,0 +1,36 @@
use orchid_api_derive::Coding;
use orchid_api_traits::Request;
use orchid_base::name::Sym;
use orchid_extension::atom::{AtomMethod, Atomic, MethodSetBuilder, Supports};
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::context::i;

use crate::std::protocol::types::{GetImplMethod, GetTagIdMethod};

#[derive(Coding, Clone, Debug)]
pub struct AsStrTag;
impl Atomic for AsStrTag {
type Data = AsStrTag;
type Variant = ThinVariant;
fn reg_reqs() -> MethodSetBuilder<Self> {
MethodSetBuilder::new().handle::<GetTagIdMethod>().handle::<GetImplMethod>()
}
}
impl ThinAtom for AsStrTag {}
impl Supports<GetTagIdMethod> for AsStrTag {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::parse("std::string::to_string", &i()).await.unwrap().to_api()
}
}
impl Supports<GetImplMethod> for AsStrTag {
async fn handle(&self, _: GetImplMethod) -> <GetImplMethod as Request>::Response { None }
}

#[derive(Coding, Clone, Debug)]
pub struct ToStringMethod;
impl Request for ToStringMethod {
type Response = String;
}
impl AtomMethod for ToStringMethod {
const NAME: &str = "std::string::to_string";
}
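Any atom can opt into string interpolation by answering `ToStringMethod`, the same way `StrAtom`, `IntStrAtom` and `SymAtom` do elsewhere in this commit. A hedged sketch for a hypothetical atom type follows; the atom's `reg_reqs` would also need `.handle::<ToStringMethod>()` for the request to be routed, as `Atomic for AsStrTag` does above for the tag methods.

```rust
// MyAtom is hypothetical; the impl shape matches the ones in this commit.
impl Supports<ToStringMethod> for MyAtom {
  async fn handle(&self, _: ToStringMethod) -> <ToStringMethod as Request>::Response {
    format!("MyAtom({})", self.0)
  }
}
```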
211
orchid-std/src/std/tuple.rs
Normal file
@@ -0,0 +1,211 @@
use std::borrow::Cow;
use std::num::NonZero;
use std::pin::Pin;
use std::rc::Rc;

use futures::AsyncWrite;
use futures::future::join_all;
use never::Never;
use orchid_api::ExprTicket;
use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_extension::atom::{Atomic, TAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::expr::{Expr, ExprHandle};
use orchid_extension::gen_expr::GExpr;
use orchid_extension::system::dep_req;
use orchid_extension::tree::{GenMember, cnst, fun, prefix};

use crate::std::std_system::StdReq;
use crate::{Int, StdSystem, api};

#[derive(Clone)]
pub struct Tuple(pub(super) Rc<Vec<Expr>>);

impl Atomic for Tuple {
type Data = Vec<ExprTicket>;
type Variant = OwnedVariant;
}

impl OwnedAtom for Tuple {
type Refs = Vec<Expr>;
async fn val(&self) -> Cow<'_, Self::Data> {
Cow::Owned(self.0.iter().map(|x| x.handle().ticket()).collect())
}
async fn serialize(&self, _: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.0.as_ref().clone()
}
async fn deserialize(_: impl DeserializeCtx, refs: Self::Refs) -> Self { Self(Rc::new(refs)) }
async fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
Variants::default()
.sequence(self.0.len(), "t[", ", ", "]", Some(true))
.sequence(self.0.len(), "t[\n", ",\n", "\n]", Some(true))
.units_own(join_all(self.0.iter().map(|x| x.print(c))).await)
}
}

#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(StdReq)]
pub struct CreateTuple(pub Vec<api::ExprTicket>);
impl Request for CreateTuple {
type Response = api::ExprTicket;
}

#[derive(Clone)]
pub struct TupleBuilder {
arity: NonZero<u32>,
items: Vec<Expr>,
}
impl Atomic for TupleBuilder {
type Data = ();
type Variant = OwnedVariant;
}
impl OwnedAtom for TupleBuilder {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn call(mut self, arg: Expr) -> GExpr {
self.items.push(arg);
if self.arity.get() == self.items.len().try_into().expect("counting up from 0") {
Tuple(Rc::new(self.items)).to_gen().await
} else {
self.to_gen().await
}
}
}

pub fn gen_tuple_lib() -> Vec<GenMember> {
prefix("std::tuple", [
cnst(true, "empty", Tuple(Rc::new(Vec::new()))),
fun(true, "one", async |item: Expr| Tuple(Rc::new(vec![item]))),
fun(true, "new", async |arity: TAtom<Int>| {
if let Ok(arity) = u32::try_from(arity.value.0).and_then(|v| v.try_into()) {
TupleBuilder { arity, items: Vec::new() }.to_gen().await
} else {
Tuple(Rc::new(Vec::new())).to_gen().await
}
}),
fun(true, "get", async |tup: TAtom<Tuple>, idx: TAtom<Int>| {
if let Ok(idx) = usize::try_from(idx.0)
&& let Some(val) = own(&tup).await.0.get(idx)
{
return Ok(val.clone());
}
return Err(mk_errv(
i().i("Tuple index out of bounds").await,
format!("{} is out of bounds for Tuple{}", idx.0, tup.len()),
[idx.pos()],
));
}),
fun(true, "set", async |tup: TAtom<Tuple>, idx: TAtom<Int>, val: Expr| {
if let Ok(idx) = usize::try_from(idx.0) {
let mut new_vec = own(&tup).await.0.to_vec();
if let Some(slot) = new_vec.get_mut(idx) {
*slot = val;
return Ok(Tuple(Rc::new(new_vec)));
}
}
return Err(mk_errv(
i().i("Tuple index out of bounds").await,
format!("{} is out of bounds for Tuple{}", idx.0, tup.len()),
[idx.pos()],
));
}),
fun(true, "len", async |tup: TAtom<Tuple>| {
Int(tup.len().try_into().expect("Tuple was created with an Int length"))
}),
fun(true, "cat", async |left: TAtom<Tuple>, right: TAtom<Tuple>| {
Tuple(Rc::new(own(&left).await.0.iter().chain(own(&right).await.0.iter()).cloned().collect()))
}),
])
}

pub struct UntypedTuple(pub Vec<Expr>);
impl TryFromExpr for UntypedTuple {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
let tpl = TAtom::<Tuple>::try_from_expr(expr.clone()).await?;
let exprs =
join_all(tpl.iter().map(async |t| Expr::from_handle(ExprHandle::from_ticket(*t).await)))
.await;
Ok(UntypedTuple(exprs))
}
}
impl ToExpr for UntypedTuple {
async fn to_gen(self) -> GExpr {
let exprs = join_all(self.0.into_iter().map(async |expr| expr.serialize().await)).await;
Expr::deserialize(dep_req::<StdSystem, _>(CreateTuple(exprs)).await).await.to_gen().await
}
}

pub struct Tpl<T>(pub T);

mod tpl_impls {
use itertools::Itertools;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_extension::context::i;
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::GExpr;

use super::{Tpl, UntypedTuple};

macro_rules! tpl_derives {
($len:literal $($t:ident)*) => {
pastey::paste! {
impl<$( $t: TryFromExpr, )*> TryFromExpr for Tpl<($( $t, )*)> {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
let tpl = UntypedTuple::try_from_expr(expr.clone()).await?;
let Some([$( [< $t:lower >], )*]) = tpl.0.iter().cloned().collect_array() else {
return Err(mk_errv(
i().i("Tuple arity mismatch").await,
format!("Expected a {}-ary tuple, found {}-ary", $len, tpl.0.len()),
[expr.data().await.pos.clone()]
));
};
Ok(Tpl(( $( $t::try_from_expr([< $t:lower >]).await?, )* )))
}
}
impl<$( $t: ToExpr, )*> ToExpr for Tpl<($( $t, )*)> {
async fn to_gen(self) -> GExpr {
let Self(($( [< $t:lower >], )*)) = self;
UntypedTuple(vec![
$( [< $t:lower >].to_expr().await, )*
]).to_gen().await
}
}
}
};
}
tpl_derives!(0);
tpl_derives!(1 A);
tpl_derives!(2 A B);
tpl_derives!(3 A B C);
tpl_derives!(4 A B C D);
tpl_derives!(5 A B C D E);
tpl_derives!(6 A B C D E F);
tpl_derives!(7 A B C D E F G);
tpl_derives!(8 A B C D E F G H);
tpl_derives!(9 A B C D E F G H I);
tpl_derives!(10 A B C D E F G H I J);
}

pub struct HomoTpl<T>(pub Vec<T>);

impl<T: TryFromExpr> TryFromExpr for HomoTpl<T> {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
let tpl = TAtom::<Tuple>::try_from_expr(expr.clone()).await?;
let mut res = Vec::new();
for item in tpl.iter() {
res.push(T::try_from_expr(Expr::from_handle(ExprHandle::from_ticket(*item).await)).await?);
}
Ok(HomoTpl(res))
}
}
impl<T: ToExpr> ToExpr for HomoTpl<T> {
async fn to_gen(self) -> GExpr {
UntypedTuple(join_all(self.0.into_iter().map(async |t| t.to_expr().await)).await).to_gen().await
}
}
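A hedged sketch of the `Tpl` convenience wrapper in use: fixed-arity tuples convert to and from Rust tuples through the macro-generated impls above, so a stdlib entry could accept and return them directly. The `swap` entry is hypothetical and only reuses helpers already shown in this file.

```rust
// Illustrative only: swap the two slots of a 2-tuple using Tpl.
fun(true, "swap", async |Tpl((a, b)): Tpl<(Expr, Expr)>| Tpl((b, a))),
```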
1
orchidlang/.gitignore
vendored
@@ -1 +0,0 @@
target
828
orchidlang/Cargo.lock
generated
@@ -1,828 +0,0 @@
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d713b3834d76b85304d4d525563c1276e2e30dc97cc67bfb4585a4a29fc2c89f"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
"version_check",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "allocator-api2"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
|
||||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.6.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96b09b5178381e0874812a9b157f7fe84982617e48f71f4e3235482775e5b540"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"anstyle-parse",
|
||||
"anstyle-query",
|
||||
"anstyle-wincon",
|
||||
"colorchoice",
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
|
||||
dependencies = [
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-query"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
|
||||
dependencies = [
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-wincon"
|
||||
version = "3.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "atty"
|
||||
version = "0.2.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
|
||||
dependencies = [
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
"winapi 0.3.9",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "block-buffer"
|
||||
version = "0.10.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
|
||||
dependencies = [
|
||||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bound"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6021ae095f16f54aaae093f4c723700430e71eab731d3b0a07fc8fe258fd5110"
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
"clap_lex",
|
||||
"strsim",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.50",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce"
|
||||
|
||||
[[package]]
|
||||
name = "colorchoice"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
|
||||
|
||||
[[package]]
|
||||
name = "const_format"
|
||||
version = "0.2.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673"
|
||||
dependencies = [
|
||||
"const_format_proc_macros",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "const_format_proc_macros"
|
||||
version = "0.2.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-xid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cpufeatures"
|
||||
version = "0.2.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-deque"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
|
||||
dependencies = [
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-epoch"
|
||||
version = "0.9.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
|
||||
|
||||
[[package]]
|
||||
name = "crypto-common"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
|
||||
dependencies = [
|
||||
"generic-array",
|
||||
"typenum",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "digest"
|
||||
version = "0.10.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
||||
dependencies = [
|
||||
"block-buffer",
|
||||
"crypto-common",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dyn-clone"
|
||||
version = "1.0.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d"
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a"
|
||||
|
||||
[[package]]
|
||||
name = "generic-array"
|
||||
version = "0.14.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
|
||||
dependencies = [
|
||||
"typenum",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "globset"
|
||||
version = "0.4.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr",
|
||||
"log",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"allocator-api2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.1.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "intern-all"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "20c9bf7d7b0572f7b4398fddc93ac1a200a92d1ba319a27dac04649b2223c0f6"
|
||||
dependencies = [
|
||||
"hashbrown",
|
||||
"lazy_static",
|
||||
"trait-set",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
|
||||
dependencies = [
|
||||
"either",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "kernel32-sys"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
|
||||
dependencies = [
|
||||
"winapi 0.2.8",
|
||||
"winapi-build",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.153"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
|
||||
|
||||
[[package]]
|
||||
name = "never"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c96aba5aa877601bb3f6dd6a63a969e1f82e60646e81e71b14496995e9853c91"
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numtoa"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8f8bdf33df195859076e54ab11ee78a1b208382d3a26ec40d142ffc1ecc49ef"
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||
|
||||
[[package]]
|
||||
name = "orchidlang"
|
||||
version = "0.3.0"
|
||||
dependencies = [
|
||||
"bound",
|
||||
"clap",
|
||||
"const_format",
|
||||
"dyn-clone",
|
||||
"hashbrown",
|
||||
"intern-all",
|
||||
"itertools",
|
||||
"never",
|
||||
"once_cell",
|
||||
"ordered-float",
|
||||
"paste",
|
||||
"rayon",
|
||||
"rust-embed",
|
||||
"substack",
|
||||
"take_mut",
|
||||
"termsize",
|
||||
"trait-set",
|
||||
"unicode-segmentation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ordered-float"
|
||||
version = "4.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a76df7075c7d4d01fdcb46c912dd17fba5b60c78ea480b475f2b6ab6f666584e"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "paste"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.78"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.35"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fa7237101a77a10773db45d62004a272517633fbcc3df19d96455ede1122e051"
|
||||
dependencies = [
|
||||
"either",
|
||||
"rayon-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon-core"
|
||||
version = "1.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
|
||||
dependencies = [
|
||||
"crossbeam-deque",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_termios"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "20145670ba436b55d91fc92d25e71160fbfbdd57831631c8d7d36377a476f1cb"
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
|
||||
|
||||
[[package]]
|
||||
name = "rust-embed"
|
||||
version = "8.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a82c0bbc10308ed323529fd3c1dce8badda635aa319a5ff0e6466f33b8101e3f"
|
||||
dependencies = [
|
||||
"rust-embed-impl",
|
||||
"rust-embed-utils",
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rust-embed-impl"
|
||||
version = "8.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6227c01b1783cdfee1bcf844eb44594cd16ec71c35305bf1c9fb5aade2735e16"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rust-embed-utils",
|
||||
"syn 2.0.50",
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rust-embed-utils"
|
||||
version = "8.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8cb0a25bfbb2d4b4402179c2cf030387d9990857ce08a32592c6238db9fa8665"
|
||||
dependencies = [
|
||||
"globset",
|
||||
"sha2",
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "same-file"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
|
||||
dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.197"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.197"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.50",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sha2"
|
||||
version = "0.10.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01"
|
||||
|
||||
[[package]]
|
||||
name = "substack"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ffccc3d80f0a489de67aa74ff31ab852abb973e1c6dacf3704889e00ca544e7f"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.109"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.50"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "take_mut"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
|
||||
|
||||
[[package]]
|
||||
name = "termion"
|
||||
version = "1.5.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "077185e2eac69c3f8379a4298e1e07cd36beb962290d4a51199acf0fdc10607e"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"numtoa",
|
||||
"redox_syscall",
|
||||
"redox_termios",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termsize"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e86d824a8e90f342ad3ef4bd51ef7119a9b681b0cc9f8ee7b2852f02ccd2517"
|
||||
dependencies = [
|
||||
"atty",
|
||||
"kernel32-sys",
|
||||
"libc",
|
||||
"termion",
|
||||
"winapi 0.2.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trait-set"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b79e2e9c9ab44c6d7c20d5976961b47e8f49ac199154daa514b77cd1ab536625"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typenum"
|
||||
version = "1.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-segmentation"
|
||||
version = "1.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
|
||||
|
||||
[[package]]
|
||||
name = "utf8parse"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||
|
||||
[[package]]
|
||||
name = "walkdir"
|
||||
version = "2.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
|
||||
dependencies = [
|
||||
"same-file",
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu",
|
||||
"winapi-x86_64-pc-windows-gnu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-build"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-util"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
|
||||
dependencies = [
|
||||
"winapi 0.3.9",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d380ba1dc7187569a8a9e91ed34b8ccfc33123bbacb8c0aed2d1ad7f3ef2dc5f"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68e5dcfb9413f53afd9c8f86e56a7b4d86d9a2fa26090ea2dc9e40fba56c6ec6"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8dab469ebbc45798319e69eebf92308e541ce46760b49b18c6b3fe5e8965b30f"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2a4e9b6a7cac734a8b4138a4e1044eac3404d8326b6c0f939276560687a033fb"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28b0ec9c422ca95ff34a78755cfa6ad4a51371da2a5ace67500cf7ca5f232c58"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "704131571ba93e89d7cd43482277d6632589b18ecf4468f591fbae0a8b101614"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "42079295511643151e98d61c38c0acc444e52dd42ab456f7ccfd5152e8ecf21c"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0770833d60a970638e989b3fa9fd2bb1aaadcf88963d1659fd7d9990196ed2d6"
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.50",
|
||||
]
|
||||
@@ -1,41 +0,0 @@
[package]
name = "orchidlang"
version = "0.3.0"
edition = "2024"
license = "GPL-3.0"
repository = "https://github.com/lbfalvy/orchid"
description = """
An embeddable pure functional scripting language
"""
authors = ["Lawrence Bethlenfalvy <lbfalvy@protonmail.com>"]

[lib]
path = "src/lib.rs"

[[bin]]
name = "orcx"
path = "src/bin/orcx.rs"
doc = false

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
hashbrown = "0.14"
ordered-float = "4.2"
itertools = "0.12"
dyn-clone = "1.0"
trait-set = "0.3"
paste = "1.0"
rust-embed = { version = "8.2", features = ["include-exclude"] }
take_mut = "0.2"
unicode-segmentation = "1.11"
never = "0.1"
substack = "1.1"
intern-all = "0.4.1"
once_cell = "1.19"
const_format = "0.2"
bound = "0.5"
# Dependencies of orcx
clap = { version = "4.5", features = ["derive"] }
rayon = "1.8"
termsize = "0.1"
@@ -1,2 +0,0 @@
mod prompt;
pub use prompt::cmd_prompt;
@@ -1,11 +0,0 @@
use std::io::{self, Error, Write};

pub fn cmd_prompt(prompt: &str) -> Result<(String, Vec<String>), Error> {
  print!("{}", prompt);
  io::stdout().flush()?;
  let mut cmdln = String::new();
  io::stdin().read_line(&mut cmdln)?;
  let mut segments = cmdln.split(' ');
  let cmd = if let Some(cmd) = segments.next() { cmd } else { "" };
  Ok((cmd.to_string(), segments.map(str::to_string).collect()))
}
@@ -1,74 +0,0 @@
use itertools::Itertools;
use orchidlang::error::Reporter;
use orchidlang::facade::macro_runner::MacroRunner;
use orchidlang::libs::std::exit_status::OrcExitStatus;
use orchidlang::location::{CodeGenInfo, CodeLocation};
use orchidlang::name::Sym;
use orchidlang::pipeline::project::{ItemKind, ProjItem, ProjectTree};
use orchidlang::sym;

use crate::cli::cmd_prompt;

/// A little utility to step through the reproject of a macro set
pub fn main(tree: ProjectTree, symbol: Sym) -> OrcExitStatus {
  print!("Macro debugger starting on {symbol}");
  let location = CodeLocation::new_gen(CodeGenInfo::no_details(sym!(orcx::macro_runner)));
  let expr_ent = match tree.0.walk1_ref(&[], &symbol[..], |_| true) {
    Ok((e, _)) => e.clone(),
    Err(e) => {
      eprintln!("{}", e.at(&location.origin()));
      return OrcExitStatus::Failure;
    },
  };
  let mut expr = match expr_ent.item() {
    Some(ProjItem { kind: ItemKind::Const(c) }) => c.clone(),
    _ => {
      eprintln!("macro-debug argument must be a constant");
      return OrcExitStatus::Failure;
    },
  };
  let reporter = Reporter::new();
  let macro_runner = MacroRunner::new(&tree, None, &reporter);
  reporter.assert_exit();
  println!("\nInitial state: {expr}");
  // print_for_debug(&code);
  let mut steps = macro_runner.step(expr.clone()).enumerate();
  loop {
    let (cmd, _) = cmd_prompt("\ncmd> ").unwrap();
    match cmd.trim() {
      "" | "n" | "next" => match steps.next() {
        None => print!("Halted"),
        Some((idx, c)) => {
          expr = c;
          print!("Step {idx}: {expr}");
        },
      },
      "p" | "print" => {
        let glossary = expr.value.collect_names();
        let gl_str = glossary.iter().join(", ");
        print!("code: {expr}\nglossary: {gl_str}")
      },
      "d" | "dump" => print!("Rules: {}", macro_runner.repo),
      "q" | "quit" => return OrcExitStatus::Success,
      "complete" => {
        match steps.last() {
          Some((idx, c)) => print!("Step {idx}: {c}"),
          None => print!("Already halted"),
        }
        return OrcExitStatus::Success;
      },
      "h" | "help" => print!(
        "Available commands:
\t<blank>, n, next\t\ttake a step
\tp, print\t\tprint the current state
\td, dump\t\tprint the rule table
\tq, quit\t\texit
\th, help\t\tprint this text"
      ),
      _ => {
        print!("unrecognized command \"{}\", try \"help\"", cmd);
        continue;
      },
    }
  }
}
@@ -1,4 +0,0 @@
pub mod macro_debug;
pub mod print_project;
pub mod shared;
pub mod tests;
@@ -1,55 +0,0 @@
use itertools::Itertools;
use orchidlang::pipeline::project::{ItemKind, ProjItem, ProjectMod};
use orchidlang::tree::{ModEntry, ModMember};

#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct ProjPrintOpts {
  pub width: u16,
  pub hide_locations: bool,
}

fn indent(amount: u16) -> String { " ".repeat(amount.into()) }

pub fn print_proj_mod(module: &ProjectMod, lvl: u16, opts: ProjPrintOpts) -> String {
  let mut acc = String::new();
  let tab = indent(lvl);
  for (key, ModEntry { member, x }) in &module.entries {
    let mut line_acc = String::new();
    for c in &x.comments {
      line_acc += &format!("{tab}, --[|{}|]--\n", c);
    }
    if x.exported {
      line_acc += &format!("{tab}export ");
    } else {
      line_acc += &tab
    }
    match member {
      ModMember::Sub(module) => {
        line_acc += &format!("module {key} {{\n");
        line_acc += &print_proj_mod(module, lvl + 1, opts);
        line_acc += &format!("{tab}}}");
      },
      ModMember::Item(ProjItem { kind: ItemKind::None }) => {
        line_acc += &format!("keyword {key}");
      },
      ModMember::Item(ProjItem { kind: ItemKind::Alias(tgt) }) => {
        line_acc += &format!("alias {key} => {tgt}");
      },
      ModMember::Item(ProjItem { kind: ItemKind::Const(val) }) => {
        line_acc += &format!("const {key} := {val}");
      },
    }
    if !x.locations.is_empty() && !opts.hide_locations {
      let locs = x.locations.iter().map(|l| l.to_string()).join(", ");
      let line_len = line_acc.split('\n').last().unwrap().len();
      match usize::from(opts.width).checked_sub(locs.len() + line_len + 4) {
        Some(padding) => line_acc += &" ".repeat(padding),
        None => line_acc += &format!("\n{tab} @ "),
      }
      line_acc += &locs;
    }
    line_acc += "\n";
    acc += &line_acc
  }
  acc
}
@@ -1,64 +0,0 @@
use std::io::BufReader;
use std::thread;

use orchidlang::facade::loader::Loader;
use orchidlang::libs::asynch::system::AsynchSystem;
use orchidlang::libs::directfs::DirectFS;
use orchidlang::libs::io::{IOService, Sink, Source, Stream};
use orchidlang::libs::scheduler::system::SeqScheduler;
use orchidlang::libs::std::std_system::StdConfig;

pub fn stdin_source() -> Source { BufReader::new(Box::new(std::io::stdin())) }
pub fn stdout_sink() -> Sink { Box::new(std::io::stdout()) }
pub fn stderr_sink() -> Sink { Box::new(std::io::stderr()) }

pub fn with_std_env<T>(cb: impl for<'a> FnOnce(Loader<'a>) -> T) -> T {
  with_env(stdin_source(), stdout_sink(), stderr_sink(), cb)
}

pub fn with_env<T>(
  stdin: Source,
  stdout: Sink,
  stderr: Sink,
  cb: impl for<'a> FnOnce(Loader<'a>) -> T,
) -> T {
  let mut asynch = AsynchSystem::new();
  let scheduler = SeqScheduler::new(&mut asynch);
  let std_streams = [
    ("stdin", Stream::Source(stdin)),
    ("stdout", Stream::Sink(stdout)),
    ("stderr", Stream::Sink(stderr)),
  ];
  let env = Loader::new()
    .add_system(StdConfig { impure: true })
    .add_system(asynch)
    .add_system(scheduler.clone())
    .add_system(IOService::new(scheduler.clone(), std_streams))
    .add_system(DirectFS::new(scheduler));
  cb(env)
}

pub fn worker_cnt() -> usize { thread::available_parallelism().map(usize::from).unwrap_or(1) }

macro_rules! unwrap_exit {
  ($param:expr) => {
    match $param {
      Ok(v) => v,
      Err(e) => {
        eprintln!("{e}");
        return ExitCode::FAILURE;
      },
    }
  };
  ($param:expr; $error:expr) => {
    match $param {
      Ok(v) => v,
      Err(e) => {
        eprintln!("{e}");
        return $error;
      },
    }
  };
}

pub(crate) use unwrap_exit;
@@ -1,111 +0,0 @@
use std::fmt;
use std::io::BufReader;
use std::path::Path;

use hashbrown::HashMap;
use itertools::Itertools;
use orchidlang::error::{ProjectError, ProjectResult, Reporter};
use orchidlang::facade::loader::Loader;
use orchidlang::facade::macro_runner::MacroRunner;
use orchidlang::facade::merge_trees::NortConst;
use orchidlang::facade::process::Process;
use orchidlang::foreign::error::{RTError, RTErrorObj, RTResult};
use orchidlang::foreign::inert::Inert;
use orchidlang::interpreter::error::RunError;
use orchidlang::interpreter::nort;
use orchidlang::libs::io::{Sink, Source};
use orchidlang::libs::std::exit_status::OrcExitStatus;
use orchidlang::name::Sym;
use rayon::iter::ParallelIterator;
use rayon::slice::ParallelSlice;

use super::shared::{with_env, worker_cnt};

pub fn mock_source() -> Source { BufReader::new(Box::new(&[][..])) }
pub fn mock_sink() -> Sink { Box::<Vec<u8>>::default() }
pub fn with_mock_env<T>(cb: impl for<'a> FnOnce(Loader<'a>) -> T) -> T {
  with_env(mock_source(), mock_sink(), mock_sink(), cb)
}

#[derive(Clone)]
pub struct TestDidNotHalt(Sym);
impl RTError for TestDidNotHalt {}
impl fmt::Display for TestDidNotHalt {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "Test {} did not halt", self.0)
  }
}

#[derive(Clone)]
pub struct TestDidNotSucceed(Sym, nort::Expr);
impl RTError for TestDidNotSucceed {}
impl fmt::Display for TestDidNotSucceed {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "Test {} settled on {}", self.0, self.1)
  }
}

pub fn run_test(proc: &mut Process, name: Sym, data: NortConst) -> RTResult<()> {
  let res = proc.run(data.value, Some(10_000)).map_err(|e| match e {
    RunError::Extern(e) => e,
    RunError::Interrupted(_) => TestDidNotHalt(name.clone()).pack(),
  })?;
  match res.clone().downcast()? {
    Inert(OrcExitStatus::Success) => Ok(()),
    _ => Err(TestDidNotSucceed(name, res).pack()),
  }
}
pub fn run_tests(
  dir: &Path,
  macro_limit: usize,
  threads: Option<usize>,
  tests: &[(Sym, NortConst)],
) -> ProjectResult<()> {
  with_mock_env(|env| {
    let reporter = Reporter::new();
    env.proc_dir(dir.to_owned(), true, Some(macro_limit), &reporter);
    reporter.bind()
  })?;
  let threads = threads.unwrap_or_else(worker_cnt);
  rayon::ThreadPoolBuilder::new().num_threads(threads).build_global().unwrap();
  let batch_size = tests.len().div_ceil(threads);
  let errors = (tests.par_chunks(batch_size))
    .map(|tests| {
      with_mock_env(|env| {
        let reporter = Reporter::new();
        let mut proc = env.proc_dir(dir.to_owned(), true, Some(macro_limit), &reporter);
        reporter.assert(); // checked above
        (tests.iter())
          .filter_map(|(test, constant)| {
            Some((test.clone(), run_test(&mut proc, test.clone(), constant.clone()).err()?))
          })
          .collect_vec()
      })
    })
    .collect::<Vec<_>>()
    .into_iter()
    .flatten()
    .collect::<HashMap<_, _>>();
  if errors.is_empty() { Ok(()) } else { Err(TestsFailed(errors).pack()) }
}

pub struct TestsFailed(HashMap<Sym, RTErrorObj>);
impl ProjectError for TestsFailed {
  const DESCRIPTION: &'static str = "Various tests failed";
  fn message(&self) -> String {
    ([format!("{} tests failed. Errors:", self.0.len())].into_iter())
      .chain(self.0.iter().map(|(k, e)| format!("In {k}, {e}")))
      .join("\n")
  }
}

pub fn get_tree_tests(dir: &Path, reporter: &Reporter) -> ProjectResult<Vec<(Sym, NortConst)>> {
  with_mock_env(|env| {
    let tree = env.load_dir(dir.to_owned(), reporter);
    let tree = MacroRunner::new(&tree, Some(10_000), reporter).run_macros(tree, reporter);
    (tree.all_consts().into_iter())
      .filter(|(_, rep)| rep.comments.iter().any(|s| s.trim() == "test"))
      .map(|(k, v)| Ok((k.clone(), NortConst::convert_from(v, reporter))))
      .collect::<ProjectResult<Vec<_>>>()
  })
}
@@ -1,283 +0,0 @@
mod cli;
mod features;

use std::fs::File;
use std::io::{stdin, stdout, Write};
use std::path::PathBuf;
use std::process::ExitCode;

use clap::{Parser, Subcommand};
use hashbrown::HashSet;
use itertools::Itertools;
use never::Never;
use orchidlang::error::Reporter;
use orchidlang::facade::macro_runner::MacroRunner;
use orchidlang::facade::merge_trees::{merge_trees, NortConst};
use orchidlang::facade::process::Process;
use orchidlang::foreign::inert::Inert;
use orchidlang::gen::tpl;
use orchidlang::gen::traits::Gen;
use orchidlang::interpreter::gen_nort::nort_gen;
use orchidlang::interpreter::nort::{self};
use orchidlang::libs::std::exit_status::OrcExitStatus;
use orchidlang::libs::std::string::OrcString;
use orchidlang::location::{CodeGenInfo, CodeLocation, SourceRange};
use orchidlang::name::Sym;
use orchidlang::parse::context::FlatLocContext;
use orchidlang::parse::lexer::{lex, Lexeme};
use orchidlang::sym;
use orchidlang::tree::{ModMemberRef, TreeTransforms};
use orchidlang::virt_fs::{decl_file, DeclTree};

use crate::features::macro_debug;
use crate::features::print_project::{print_proj_mod, ProjPrintOpts};
use crate::features::shared::{stderr_sink, stdout_sink, unwrap_exit, with_env, with_std_env};
use crate::features::tests::{get_tree_tests, mock_source, run_test, run_tests, with_mock_env};

#[derive(Subcommand, Debug)]
enum Command {
  /// Run unit tests, any constant annotated --[[ test ]]--
  Test {
    /// Specify an exact test to run
    #[arg(long)]
    only: Option<String>,
    #[arg(long, short)]
    threads: Option<usize>,
    #[arg(long)]
    system: Option<String>,
  },
  #[command(arg_required_else_help = true)]
  MacroDebug {
    #[arg(long, short)]
    symbol: String,
  },
  ListMacros,
  ProjectTree {
    #[arg(long, default_value_t = false)]
    hide_locations: bool,
    #[arg(long)]
    width: Option<u16>,
  },
  Repl,
}
/// Orchid interpreter
#[derive(Parser, Debug)]
#[command(name = "Orchid Executor")]
#[command(author = "Lawrence Bethlenfalvy <lbfalvy@protonmail.com>")]
#[command(long_about = Some("Execute Orchid projects from the file system"))]
struct Args {
  /// Folder containing main.orc or the manually specified entry module
  #[arg(short, long, default_value = ".")]
  pub dir: String,
  /// Alternative entrypoint for the interpreter
  #[arg(short, long)]
  pub main: Option<String>,
  /// Maximum number of steps taken by the macro executor
  #[arg(long, default_value_t = 10_000)]
  pub macro_limit: usize,

  #[command(subcommand)]
  pub command: Option<Command>,
}
impl Args {
  /// Validate the project directory and the entry point
  pub fn chk_dir_main(&self) -> Result<(), String> {
    let dir_path = PathBuf::from(&self.dir);
    if !dir_path.is_dir() {
      return Err(format!("{} is not a directory", dir_path.display()));
    }
    let segs = match &self.main {
      Some(s) => s.split("::").collect::<Vec<_>>(),
      None => match File::open("./main.orc") {
        Ok(_) => return Ok(()),
        Err(e) => return Err(format!("Cannot open './main.orc'\n{e}")),
      },
    };
    if segs.len() < 2 {
      return Err("Entry point too short".to_string());
    };
    let (_, pathsegs) = segs.split_last().unwrap();
    let mut possible_files = pathsegs.iter().scan(dir_path, |path, seg| {
      path.push(seg);
      Some(path.with_extension("orc"))
    });
    if possible_files.all(|p| File::open(p).is_err()) {
      let out_path = pathsegs.join("::");
      let pbuf = PathBuf::from(&self.dir);
      return Err(format!("{out_path} not found in {}", pbuf.display()));
    }
    Ok(())
  }

  pub fn chk_proj(&self) -> Result<(), String> { self.chk_dir_main() }
}

pub fn main() -> ExitCode {
  let args = Args::parse();
  unwrap_exit!(args.chk_proj());
  let dir = PathBuf::from(args.dir);
  let main_s = args.main.as_ref().map_or("tree::main::main", |s| s);
  let main = Sym::parse(main_s).expect("--main cannot be empty");
  let location = CodeLocation::new_gen(CodeGenInfo::no_details(sym!(orcx::entrypoint)));
  let reporter = Reporter::new();

  // subcommands
  #[allow(clippy::blocks_in_conditions)]
  match args.command {
    Some(Command::ListMacros) => with_mock_env(|env| {
      let tree = env.load_main(dir, [main], &reporter);
      let mr = MacroRunner::new(&tree, None, &reporter);
      println!("Parsed rules: {}", mr.repo);
      ExitCode::SUCCESS
    }),
    Some(Command::ProjectTree { hide_locations, width }) => {
      let tree = with_mock_env(|env| env.load_main(dir, [main], &reporter));
      let w = width.or_else(|| termsize::get().map(|s| s.cols)).unwrap_or(74);
      let print_opts = ProjPrintOpts { width: w, hide_locations };
      println!("Project tree: {}", print_proj_mod(&tree.0, 0, print_opts));
      ExitCode::SUCCESS
    },
    Some(Command::MacroDebug { symbol }) => with_mock_env(|env| {
      let tree = env.load_main(dir, [main], &reporter);
      let symbol = Sym::parse(&symbol).expect("macro-debug needs an argument");
      macro_debug::main(tree, symbol).code()
    }),
    Some(Command::Test { only: Some(_), threads: Some(_), .. }) => {
      eprintln!(
        "Each test case runs in a single thread.
--only and --threads cannot both be specified"
      );
      ExitCode::FAILURE
    },
    Some(Command::Test { only: Some(_), system: Some(_), .. }) => {
      eprintln!(
        "Conflicting test filters applied. --only runs a single test by
symbol name, while --system runs all tests in a system"
      );
      ExitCode::FAILURE
    },
    Some(Command::Test { only: None, threads, system: None }) => {
      let tree_tests = reporter.unwrap_exit(get_tree_tests(&dir, &reporter));
      unwrap_exit!(run_tests(&dir, args.macro_limit, threads, &tree_tests));
      ExitCode::SUCCESS
    },
    Some(Command::Test { only: Some(symbol), threads: None, system: None }) => {
      let symbol = Sym::parse(&symbol).expect("Test needs an argument");
      with_env(mock_source(), stdout_sink(), stderr_sink(), |env| {
        // iife in lieu of try blocks
        let tree = env.load_main(dir.clone(), [symbol.clone()], &reporter);
        let mr = MacroRunner::new(&tree, Some(args.macro_limit), &reporter);
        let consts = mr.run_macros(tree, &reporter).all_consts();
        let test = consts.get(&symbol).expect("Test not found");
        let nc = NortConst::convert_from(test.clone(), &reporter);
        let mut proc = Process::new(merge_trees(consts, env.systems(), &reporter), env.handlers());
        unwrap_exit!(run_test(&mut proc, symbol.clone(), nc.clone()));
        ExitCode::SUCCESS
      })
    },
    Some(Command::Test { only: None, threads, system: Some(system) }) => {
      let subtrees = unwrap_exit!(with_mock_env(|env| {
        match env.systems().find(|s| s.name == system) {
          None => Err(format!("System {system} not found")),
          Some(sys) => {
            let mut paths = HashSet::new();
            sys.code.search_all((), |path, node, ()| {
              if matches!(node, ModMemberRef::Item(_)) {
                let name = Sym::new(path.unreverse()).expect("Empty path means global file");
                paths.insert(name);
              }
            });
            Ok(paths)
          },
        }
      }));
      let in_subtrees = |sym: Sym| subtrees.iter().any(|sub| sym[..].starts_with(&sub[..]));
      let tests = with_mock_env(|env| {
        let tree = env.load_main(dir.clone(), [main.clone()], &reporter);
        let mr = MacroRunner::new(&tree, Some(args.macro_limit), &reporter);
        let src_consts = mr.run_macros(tree, &reporter).all_consts();
        let consts = merge_trees(src_consts, env.systems(), &reporter);
        (consts.into_iter())
          .filter(|(k, v)| in_subtrees(k.clone()) && v.comments.iter().any(|c| c.trim() == "test"))
          .collect_vec()
      });
      eprintln!("Running {} tests", tests.len());
      unwrap_exit!(run_tests(&dir, args.macro_limit, threads, &tests));
      eprintln!("All tests pass");
      ExitCode::SUCCESS
    },
    None => with_std_env(|env| {
      let proc = env.proc_main(dir, [main.clone()], true, Some(args.macro_limit), &reporter);
      reporter.assert_exit();
      let ret = unwrap_exit!(proc.run(nort::Clause::Constant(main).into_expr(location), None));
      drop(proc);
      match ret.clone().downcast() {
        Ok(Inert(OrcExitStatus::Success)) => ExitCode::SUCCESS,
        Ok(Inert(OrcExitStatus::Failure)) => ExitCode::FAILURE,
        Err(_) => {
          println!("{}", ret.clause);
          ExitCode::SUCCESS
        },
      }
    }),
    Some(Command::Repl) => with_std_env(|env| {
      let sctx = env.project_ctx(&reporter);
      loop {
        let reporter = Reporter::new();
        print!("orc");
        let mut src = String::new();
        let mut paren_tally = 0;
        loop {
          print!("> ");
          stdout().flush().unwrap();
          let mut buf = String::new();
          stdin().read_line(&mut buf).unwrap();
          src += &buf;
          let range = SourceRange::mock();
          let spctx = sctx.parsing(range.code());
          let pctx = FlatLocContext::new(&spctx, &range);
          let res =
            lex(Vec::new(), &buf, &pctx, |_| Ok::<_, Never>(false)).unwrap_or_else(|e| match e {});
          res.tokens.iter().for_each(|e| match &e.lexeme {
            Lexeme::LP(_) => paren_tally += 1,
            Lexeme::RP(_) => paren_tally -= 1,
            _ => (),
          });
          if 0 == paren_tally {
            break;
          }
        }
        let tree = env.load_project_main(
          [sym!(tree::main::__repl_input__)],
          DeclTree::ns("tree::main", [decl_file(&format!("const __repl_input__ := {src}"))]),
          &reporter,
        );
        let mr = MacroRunner::new(&tree, Some(args.macro_limit), &reporter);
        let proj_consts = mr.run_macros(tree, &reporter).all_consts();
        let consts = merge_trees(proj_consts, env.systems(), &reporter);
        let ctx = nort_gen(location.clone());
        let to_string_tpl = tpl::A(tpl::C("std::string::convert"), tpl::Slot);
        if let Err(err) = reporter.bind() {
          eprintln!("{err}");
          continue;
        }
        let proc = Process::new(consts, env.handlers());
        let prompt = tpl::C("tree::main::__repl_input__").template(ctx.clone(), []);
        let out = match proc.run(prompt, Some(1000)) {
          Ok(out) => out,
          Err(e) => {
            eprintln!("{e}");
            continue;
          },
        };
        if let Ok(out) = proc.run(to_string_tpl.template(ctx, [out.clone()]), Some(1000)) {
          if let Ok(s) = out.clone().downcast::<Inert<OrcString>>() {
            println!("{}", s.0.as_str());
            continue;
          }
        }
        println!("{out}")
      }
    }),
  }
}
@@ -1,203 +0,0 @@
//! The main structure of the façade, collects systems and exposes various
//! operations over the whole set.

use std::borrow::Borrow;
use std::path::PathBuf;

use intern_all::i;

use super::macro_runner::MacroRunner;
use super::merge_trees::merge_trees;
use super::process::Process;
use super::system::{IntoSystem, System};
use super::unbound_ref::validate_refs;
use crate::error::Reporter;
use crate::gen::tree::ConstTree;
use crate::interpreter::context::RunEnv;
use crate::interpreter::handler::HandlerTable;
use crate::location::{CodeGenInfo, CodeOrigin};
use crate::name::{PathSlice, Sym, VPath};
use crate::pipeline::load_project::{load_project, ProjectContext};
use crate::pipeline::project::ProjectTree;
use crate::sym;
use crate::utils::combine::Combine;
use crate::utils::sequence::Sequence;
use crate::virt_fs::{DeclTree, DirNode, Loaded, VirtFS};

/// A compiled environment ready to load user code. It stores the list of
/// systems and combines with usercode to produce a [Process]
pub struct Loader<'a> {
  systems: Vec<System<'a>>,
}
impl<'a> Loader<'a> {
  /// Initialize a new environment
  #[must_use]
  pub fn new() -> Self { Self { systems: Vec::new() } }

  /// Retrieve the list of systems
  pub fn systems(&self) -> impl Iterator<Item = &System<'a>> { self.systems.iter() }

  /// Register a new system in the environment
  #[must_use]
  pub fn add_system<'b: 'a>(mut self, is: impl IntoSystem<'b> + 'b) -> Self {
    self.systems.push(Box::new(is).into_system());
    self
  }

  /// Extract the systems from the environment
  pub fn into_systems(self) -> Vec<System<'a>> { self.systems }

  /// Initialize an environment with a prepared list of systems
  pub fn from_systems(sys: impl IntoIterator<Item = System<'a>>) -> Self {
    Self { systems: sys.into_iter().collect() }
  }

  /// Combine the `constants` fields of all systems
  pub fn constants(&self) -> ConstTree {
    (self.systems())
      .try_fold(ConstTree::tree::<&str>([]), |acc, sys| acc.combine(sys.constants.clone()))
      .expect("Conflicting const trees")
  }

  /// Extract the command handlers from the systems, consuming the loader in the
  /// process. This has to consume the systems because handler tables aren't
  /// Copy. It also establishes the practice that environments live on the
  /// stack.
  pub fn handlers(&self) -> HandlerTable<'_> {
    (self.systems.iter()).fold(HandlerTable::new(), |t, sys| t.link(&sys.handlers))
  }

  /// Compile the environment from the set of systems and return it directly.
  /// See [#load_dir]
  pub fn project_ctx<'b>(&self, reporter: &'b Reporter) -> ProjectContext<'_, 'b> {
    ProjectContext {
      lexer_plugins: Sequence::new(|| {
        self.systems().flat_map(|sys| &sys.lexer_plugins).map(|b| &**b)
      }),
      line_parsers: Sequence::new(|| {
        self.systems().flat_map(|sys| &sys.line_parsers).map(|b| &**b)
      }),
      preludes: Sequence::new(|| self.systems().flat_map(|sys| &sys.prelude)),
      reporter,
    }
  }

  /// Combine source code from all systems with the specified directory into a
  /// common [VirtFS]
  pub fn make_dir_fs(&self, dir: PathBuf) -> DeclTree {
    let dir_node = DirNode::new(dir, ".orc").rc();
    DeclTree::tree([("tree", DeclTree::leaf(dir_node))])
  }

  /// All system trees merged into one
  pub fn system_fs(&self) -> DeclTree {
    (self.systems().try_fold(DeclTree::empty(), |acc, sub| acc.combine(sub.code.clone())))
      .expect("Conflicting system trees")
  }

  /// A wrapper around [load_project] that only takes the arguments that aren't
  /// fully specified by systems
  pub fn load_project_main(
    &self,
    entrypoints: impl IntoIterator<Item = Sym>,
    root: DeclTree,
    reporter: &Reporter,
  ) -> ProjectTree {
    let tgt_loc = CodeOrigin::Gen(CodeGenInfo::no_details(sym!(facade::entrypoint)));
    let constants = self.constants().unwrap_mod();
    let targets = entrypoints.into_iter().map(|s| (s, tgt_loc.clone()));
    let root = self.system_fs().combine(root).expect("System trees conflict with root");
    load_project(&self.project_ctx(reporter), targets, &constants, &root)
  }

  /// A wrapper around [load_project] that only takes the arguments that aren't
  /// fully specified by systems
  pub fn load_project(&self, root: DeclTree, reporter: &Reporter) -> ProjectTree {
    let mut orc_files: Vec<VPath> = Vec::new();
    find_all_orc_files([].borrow(), &mut orc_files, &root);
    let entrypoints = (orc_files.into_iter()).map(|p| p.name_with_suffix(i!(str: "tree")).to_sym());
    let tgt_loc = CodeOrigin::Gen(CodeGenInfo::no_details(sym!(facade::entrypoint)));
    let constants = self.constants().unwrap_mod();
    let targets = entrypoints.into_iter().map(|s| (s, tgt_loc.clone()));
    let root = self.system_fs().combine(root).expect("System trees conflict with root");
    load_project(&self.project_ctx(reporter), targets, &constants, &root)
  }

  /// Load a directory from the local file system as an Orchid project.
  /// File loading proceeds along import statements and ignores all files
  /// not reachable from the specified file.
  pub fn load_main(
    &self,
    dir: PathBuf,
    targets: impl IntoIterator<Item = Sym>,
    reporter: &Reporter,
  ) -> ProjectTree {
    self.load_project_main(targets, self.make_dir_fs(dir), reporter)
  }

  /// Load every orchid file in a directory
  pub fn load_dir(&self, dir: PathBuf, reporter: &Reporter) -> ProjectTree {
    self.load_project(self.make_dir_fs(dir), reporter)
  }

  /// Build a process by calling other utilities in [crate::facade]. A sort of
  /// facade over the facade. If you need a custom file system, consider
  /// combining this with [Loader::load_project]. For usage with
  /// [Loader::load_main] and [Loader::load_dir] we offer the shorthands
  /// [Loader::proc_main] and [Loader::proc_dir].
  pub fn proc(
    &'a self,
    tree: ProjectTree,
    check_refs: bool,
    macro_limit: Option<usize>,
    reporter: &Reporter,
  ) -> Process<'a> {
    let mr = MacroRunner::new(&tree, macro_limit, reporter);
    let pm_tree = mr.run_macros(tree, reporter);
    let consts = merge_trees(pm_tree.all_consts(), self.systems(), reporter);
    if check_refs {
      validate_refs(consts.keys().cloned().collect(), reporter, &mut |sym, location| {
        (consts.get(&sym).map(|nc| nc.value.clone()))
          .ok_or_else(|| RunEnv::sym_not_found(sym, location))
      });
    }
    Process::new(consts, self.handlers())
  }

  /// Load a project and process everything
  pub fn proc_dir(
    &'a self,
    dir: PathBuf,
    check_refs: bool,
    macro_limit: Option<usize>,
    reporter: &Reporter,
  ) -> Process<'a> {
    self.proc(self.load_dir(dir.to_owned(), reporter), check_refs, macro_limit, reporter)
  }

  /// Load a project and process everything to load specific symbols
  pub fn proc_main(
    &'a self,
    dir: PathBuf,
    targets: impl IntoIterator<Item = Sym>,
    check_refs: bool,
    macro_limit: Option<usize>,
    reporter: &Reporter,
  ) -> Process<'a> {
    self.proc(self.load_main(dir.to_owned(), targets, reporter), check_refs, macro_limit, reporter)
  }
}

impl<'a> Default for Loader<'a> {
  fn default() -> Self { Self::new() }
}

fn find_all_orc_files(path: &PathSlice, paths: &mut Vec<VPath>, vfs: &impl VirtFS) {
  match vfs.read(path) {
    Err(_) => (),
    Ok(Loaded::Code(_)) => paths.push(path.to_vpath()),
    Ok(Loaded::Collection(items)) => items
      .iter()
      .for_each(|suffix| find_all_orc_files(&path.to_vpath().suffix([suffix.clone()]), paths, vfs)),
  }
}
@@ -1,102 +0,0 @@
//! Encapsulates the macro runner's scaffolding. Relies on a [ProjectTree]
//! loaded by the [super::loader::Loader]

use std::iter;

use crate::error::{ErrorPosition, ProjectError, ProjectErrorObj, ProjectResult, Reporter};
use crate::location::CodeOrigin;
use crate::parse::parsed;
use crate::pipeline::project::{ItemKind, ProjItem, ProjectTree};
use crate::rule::repository::Repo;
use crate::tree::TreeTransforms;

/// Encapsulates the macro repository and the constant list, and allows querying
/// for macro execution results
pub struct MacroRunner {
  /// Optimized catalog of substitution rules
  pub repo: Repo,
  /// Maximum number of macro steps before execution is reported as a timeout
  pub timeout: Option<usize>,
}
impl MacroRunner {
  /// Initialize a macro runner
  pub fn new(tree: &ProjectTree, timeout: Option<usize>, reporter: &Reporter) -> Self {
    let rules = tree.all_rules();
    let repo = Repo::new(rules, reporter);
    Self { repo, timeout }
  }

  /// Process the macros in an expression.
  pub fn process_expr(&self, expr: parsed::Expr) -> ProjectResult<parsed::Expr> {
    match self.timeout {
      None => Ok((self.repo.pass(&expr)).unwrap_or_else(|| expr.clone())),
      Some(limit) => {
        let (o, leftover_gas) = self.repo.long_step(&expr, limit + 1);
        if 0 < leftover_gas {
          return Ok(o);
        }
        Err(MacroTimeout { location: expr.range.origin(), limit }.pack())
      },
    }
  }

  /// Run all macros in the project.
  pub fn run_macros(&self, tree: ProjectTree, reporter: &Reporter) -> ProjectTree {
    ProjectTree(tree.0.map_data(
      |_, item| match &item.kind {
        ItemKind::Const(c) => match self.process_expr(c.clone()) {
          Ok(expr) => ProjItem { kind: ItemKind::Const(expr) },
          Err(e) => {
            reporter.report(e);
            item
          },
        },
        _ => item,
      },
      |_, x| x,
      |_, x| x,
    ))
  }

  /// Obtain an iterator that steps through the preprocessing of a constant
  /// for debugging macros
  pub fn step(&self, mut expr: parsed::Expr) -> impl Iterator<Item = parsed::Expr> + '_ {
    iter::from_fn(move || {
      expr = self.repo.step(&expr)?;
      Some(expr.clone())
    })
  }
}

/// Error raised when a macro runs too long
#[derive(Debug)]
pub struct MacroTimeout {
  location: CodeOrigin,
  limit: usize,
}
impl ProjectError for MacroTimeout {
  const DESCRIPTION: &'static str = "Macro execution has not halted";

  fn message(&self) -> String {
    let Self { limit, .. } = self;
    format!("Macro processing took more than {limit} steps")
  }

  fn one_position(&self) -> CodeOrigin { self.location.clone() }
}

struct MacroErrors(Vec<ProjectErrorObj>);
impl ProjectError for MacroErrors {
  const DESCRIPTION: &'static str = "Errors occurred during macro execution";
  fn positions(&self) -> impl IntoIterator<Item = ErrorPosition> + '_ {
    self.0.iter().enumerate().flat_map(|(i, e)| {
      e.positions().map(move |ep| ErrorPosition {
        origin: ep.origin,
        message: Some(match ep.message {
          Some(msg) => format!("Error #{}: {}; {msg}", i + 1, e.message()),
          None => format!("Error #{}: {}", i + 1, e.message()),
        }),
      })
    })
  }
}
@@ -1,75 +0,0 @@
//! Combine constants from [super::macro_runner::MacroRunner::run_macros] with
//! systems from [super::loader::Loader::systems]

use std::sync::Arc;

use hashbrown::HashMap;

use super::system::System;
use crate::error::Reporter;
use crate::foreign::inert::Inert;
use crate::foreign::to_clause::ToClause;
use crate::intermediate::ast_to_ir::ast_to_ir;
use crate::intermediate::ir_to_nort::ir_to_nort;
use crate::interpreter::nort;
use crate::location::{CodeGenInfo, CodeLocation};
use crate::name::{NameLike, Sym};
use crate::pipeline::project::ConstReport;
use crate::sym;
use crate::tree::{ModMemberRef, TreeTransforms};
use crate::utils::unwrap_or::unwrap_or;

/// Equivalent of [crate::pipeline::project::ConstReport] for the interpreter's
/// representation, [crate::interpreter::nort].
#[derive(Clone)]
pub struct NortConst {
  /// Comments associated with the constant which may affect its interpretation
  pub comments: Vec<Arc<String>>,
  /// Location of the definition, if known
  pub location: CodeLocation,
  /// Value assigned to the constant
  pub value: nort::Expr,
}
impl NortConst {
  /// Convert into NORT constant from AST constant
  pub fn convert_from(value: ConstReport, reporter: &Reporter) -> NortConst {
    let module = Sym::new(value.name.split_last().1[..].iter())
      .expect("Constant names from source are at least 2 long");
    let location = CodeLocation::new_src(value.range.clone(), value.name);
    let nort = match ast_to_ir(value.value, value.range, module.clone()) {
      Ok(ir) => ir_to_nort(&ir),
      Err(e) => {
        reporter.report(e);
        Inert(0).to_expr(location.clone())
      },
    };
    Self { value: nort, location, comments: value.comments }
  }
}

/// Combine a list of symbols loaded from source and the constant trees from
/// each system.
pub fn merge_trees<'a: 'b, 'b>(
  source: impl IntoIterator<Item = (Sym, ConstReport)>,
  systems: impl IntoIterator<Item = &'b System<'a>> + 'b,
  reporter: &Reporter,
) -> HashMap<Sym, NortConst> {
  let mut out = HashMap::new();
  for (name, rep) in source.into_iter() {
    out.insert(name.clone(), NortConst::convert_from(rep, reporter));
  }
  for system in systems {
    let const_module = system.constants.unwrap_mod_ref();
    const_module.search_all((), |stack, node, ()| {
      let c = unwrap_or!(node => ModMemberRef::Item; return);
      let location = CodeLocation::new_gen(CodeGenInfo::details(
        sym!(facade::merge_tree),
        format!("system.name={}", system.name),
      ));
      let value = c.clone().gen_nort(stack.clone(), location.clone());
      let crep = NortConst { value, comments: vec![], location };
      out.insert(Sym::new(stack.unreverse()).expect("root item is forbidden"), crep);
    });
  }
  out
}
@@ -1,9 +0,0 @@
//! A simplified set of commands each grouping a large subset of the operations
//! exposed by Orchid to make writing embeddings faster in the typical case.

pub mod loader;
pub mod macro_runner;
pub mod merge_trees;
pub mod process;
pub mod system;
pub mod unbound_ref;
@@ -1,39 +0,0 @@
//! Run Orchid commands in the context of the loaded environment. Either
//! returned by [super::loader::Loader::proc], or constructed manually from the
//! return value of [super::merge_trees::merge_trees] and
//! [super::loader::Loader::handlers].

use hashbrown::HashMap;

use super::merge_trees::NortConst;
use crate::interpreter::context::{Halt, RunEnv, RunParams};
use crate::interpreter::error::RunError;
use crate::interpreter::handler::HandlerTable;
use crate::interpreter::nort::Expr;
use crate::interpreter::run::run;
use crate::name::Sym;

/// This struct ties the state of systems to loaded code, and allows to call
/// Orchid-defined functions
pub struct Process<'a>(RunEnv<'a>);
impl<'a> Process<'a> {
  /// Build a process from the return value of [crate::facade::merge_trees] and
  /// [super::loader::Loader::handlers]
  pub fn new(
    consts: impl IntoIterator<Item = (Sym, NortConst)>,
    handlers: HandlerTable<'a>,
  ) -> Self {
    let symbols: HashMap<_, _> = consts.into_iter().map(|(k, v)| (k, v.value)).collect();
    Self(RunEnv::new(handlers, move |sym, location| {
      symbols.get(&sym).cloned().ok_or_else(|| RunEnv::sym_not_found(sym, location))
    }))
  }

  /// Execute the given command in this process. If gas is specified, at most as
  /// many steps will be executed and then the partial result returned.
  ///
  /// This is useful to catch infinite loops or ensure that a tenant program
  /// yields
  pub fn run(&self, prompt: Expr, gas: Option<usize>) -> Result<Halt, RunError<'_>> {
    run(prompt, &self.0, &mut RunParams { stack: 1000, gas })
  }
}
Some files were not shown because too many files have changed in this diff