Added directfs

Added a very rudimentary file I/O system suitable for experimenting
with the language further. A better one will be designed when we have
sensible error management.
This commit is contained in:
2023-09-17 16:37:39 +01:00
parent 1078835e8b
commit 7396078304
84 changed files with 563 additions and 721 deletions

View File

@@ -0,0 +1,31 @@
import system::(io, directfs, async)
import std::proc::*
import std::(to_string, to_uint)
const folder_view := \path.\next. do{
cps println $ "Contents of " ++ path;
cps entries = async::block_on $ directfs::readdir path;
cps list::enumerate entries
|> list::map (pass \id. pass \name.\is_dir. (
println $ to_string id ++ ": " ++ name ++ if is_dir then "/" else ""
))
|> list::chain;
cps print "select an entry, or .. to move up: ";
cps choice = readln;
let output = if choice == "..\n"
then directfs::pop_path path
|> option::unwrap
|> tuple::pick 0 2
else (
to_uint choice
|> (list::get entries)
|> option::unwrap
|> (directfs::join_paths path)
);
next output
}
const main := loop_over (path = "/home/lbfalvy/Code/orchid/examples") {
cps path = folder_view path;
}

View File

@@ -10,7 +10,7 @@ use itertools::Itertools;
use orchidlang::facade::{Environment, PreMacro}; use orchidlang::facade::{Environment, PreMacro};
use orchidlang::systems::asynch::AsynchSystem; use orchidlang::systems::asynch::AsynchSystem;
use orchidlang::systems::stl::StlConfig; use orchidlang::systems::stl::StlConfig;
use orchidlang::systems::{io, scheduler}; use orchidlang::systems::{directfs, io, scheduler};
use orchidlang::{ast, interpreted, interpreter, Interner, Sym, VName}; use orchidlang::{ast, interpreted, interpreter, Interner, Sym, VName};
use crate::cli::cmd_prompt; use crate::cli::cmd_prompt;
@@ -64,6 +64,7 @@ impl Args {
pub fn chk_proj(&self) -> Result<(), String> { self.chk_dir_main() } pub fn chk_proj(&self) -> Result<(), String> { self.chk_dir_main() }
} }
#[must_use]
pub fn to_vname(data: &str, i: &Interner) -> VName { pub fn to_vname(data: &str, i: &Interner) -> VName {
data.split("::").map(|s| i.i(s)).collect::<Vec<_>>() data.split("::").map(|s| i.i(s)).collect::<Vec<_>>()
} }
@@ -135,16 +136,17 @@ pub fn main() {
let main = to_vname(&args.main, &i); let main = to_vname(&args.main, &i);
let mut asynch = AsynchSystem::new(); let mut asynch = AsynchSystem::new();
let scheduler = scheduler::SeqScheduler::new(&mut asynch); let scheduler = scheduler::SeqScheduler::new(&mut asynch);
let io = io::Service::new(scheduler.clone(), [ let std_streams = [
("stdin", io::Stream::Source(BufReader::new(Box::new(std::io::stdin())))), ("stdin", io::Stream::Source(BufReader::new(Box::new(std::io::stdin())))),
("stdout", io::Stream::Sink(Box::new(std::io::stdout()))), ("stdout", io::Stream::Sink(Box::new(std::io::stdout()))),
("stderr", io::Stream::Sink(Box::new(std::io::stderr()))), // ("stderr", io::Stream::Sink(Box::new(std::io::stderr()))),
]); ];
let env = Environment::new(&i) let env = Environment::new(&i)
.add_system(StlConfig { impure: true }) .add_system(StlConfig { impure: true })
.add_system(asynch) .add_system(asynch)
.add_system(scheduler) .add_system(scheduler.clone())
.add_system(io); .add_system(io::Service::new(scheduler.clone(), std_streams))
.add_system(directfs::DirectFS::new(scheduler));
let premacro = env.load_dir(&dir, &main).unwrap(); let premacro = env.load_dir(&dir, &main).unwrap();
if args.dump_repo { if args.dump_repo {
println!("Parsed rules: {}", premacro.repo); println!("Parsed rules: {}", premacro.repo);

View File

@@ -2,7 +2,6 @@
mod import_all; mod import_all;
mod no_targets; mod no_targets;
mod not_exported; mod not_exported;
// mod not_found;
mod conflicting_roles; mod conflicting_roles;
mod parse_error_with_tokens; mod parse_error_with_tokens;
mod project_error; mod project_error;

View File

@@ -28,6 +28,7 @@ impl NotFound {
/// ///
/// Basically, if `e` was not produced by the `walk*` methods called on /// Basically, if `e` was not produced by the `walk*` methods called on
/// `path`. /// `path`.
#[must_use]
pub fn from_walk_error( pub fn from_walk_error(
source: &[Tok<String>], source: &[Tok<String>],
prefix: &[Tok<String>], prefix: &[Tok<String>],

View File

@@ -18,19 +18,24 @@ pub struct ErrorPosition {
/// code changes /// code changes
pub trait ProjectError { pub trait ProjectError {
/// A general description of this type of error /// A general description of this type of error
#[must_use]
fn description(&self) -> &str; fn description(&self) -> &str;
/// A formatted message that includes specific parameters /// A formatted message that includes specific parameters
#[must_use]
fn message(&self) -> String { self.description().to_string() } fn message(&self) -> String { self.description().to_string() }
/// Code positions relevant to this error. If you don't implement this, you /// Code positions relevant to this error. If you don't implement this, you
/// must implement [ProjectError::one_position] /// must implement [ProjectError::one_position]
#[must_use]
fn positions(&self) -> BoxedIter<ErrorPosition> { fn positions(&self) -> BoxedIter<ErrorPosition> {
box_once(ErrorPosition { location: self.one_position(), message: None }) box_once(ErrorPosition { location: self.one_position(), message: None })
} }
/// Short way to provide a single location. If you don't implement this, you /// Short way to provide a single location. If you don't implement this, you
/// must implement [ProjectError::positions] /// must implement [ProjectError::positions]
#[must_use]
fn one_position(&self) -> Location { unimplemented!() } fn one_position(&self) -> Location { unimplemented!() }
/// Convert the error into an `Rc<dyn ProjectError>` to be able to /// Convert the error into an `Rc<dyn ProjectError>` to be able to
/// handle various errors together /// handle various errors together
#[must_use]
fn rc(self) -> Rc<dyn ProjectError> fn rc(self) -> Rc<dyn ProjectError>
where where
Self: Sized + 'static, Self: Sized + 'static,

View File

@@ -23,9 +23,11 @@ pub struct Environment<'a> {
} }
impl<'a> Environment<'a> { impl<'a> Environment<'a> {
/// Initialize a new environment /// Initialize a new environment
#[must_use]
pub fn new(i: &'a Interner) -> Self { Self { i, systems: Vec::new() } } pub fn new(i: &'a Interner) -> Self { Self { i, systems: Vec::new() } }
/// Register a new system in the environment /// Register a new system in the environment
#[must_use]
pub fn add_system<'b: 'a>(mut self, is: impl IntoSystem<'b> + 'b) -> Self { pub fn add_system<'b: 'a>(mut self, is: impl IntoSystem<'b> + 'b) -> Self {
self.systems.push(Box::new(is).into_system(self.i)); self.systems.push(Box::new(is).into_system(self.i));
self self

View File

@@ -33,6 +33,7 @@ impl<'a> Process<'a> {
/// Find all unbound constant names in a symbol. This is often useful to /// Find all unbound constant names in a symbol. This is often useful to
/// identify dynamic loading targets. /// identify dynamic loading targets.
#[must_use]
pub fn unbound_refs(&self, key: Sym) -> Vec<(Sym, Location)> { pub fn unbound_refs(&self, key: Sym) -> Vec<(Sym, Location)> {
let mut errors = Vec::new(); let mut errors = Vec::new();
let sym = self.symbols.get(&key).expect("symbol must exist"); let sym = self.symbols.get(&key).expect("symbol must exist");
@@ -48,9 +49,9 @@ impl<'a> Process<'a> {
errors errors
} }
/// Assert that, unless [interpreted::Clause::Constant]s are created /// Assert that the code contains no invalid constants. This ensures that,
/// procedurally, a [interpreter::RuntimeError::MissingSymbol] cannot be /// unless [interpreted::Clause::Constant]s are created procedurally,
/// produced /// a [interpreter::RuntimeError::MissingSymbol] cannot be produced
pub fn validate_refs(&self) -> ProjectResult<()> { pub fn validate_refs(&self) -> ProjectResult<()> {
for key in self.symbols.keys() { for key in self.symbols.keys() {
if let Some((symbol, location)) = self.unbound_refs(key.clone()).pop() { if let Some((symbol, location)) = self.unbound_refs(key.clone()).pop() {

View File

@@ -27,6 +27,7 @@ pub struct System<'a> {
impl<'a> System<'a> { impl<'a> System<'a> {
/// Intern the name of the system so that it can be used as an Orchid /// Intern the name of the system so that it can be used as an Orchid
/// namespace /// namespace
#[must_use]
pub fn vname(&self, i: &Interner) -> VName { pub fn vname(&self, i: &Interner) -> VName {
self.name.iter().map(|s| i.i(s)).collect::<Vec<_>>() self.name.iter().map(|s| i.i(s)).collect::<Vec<_>>()
} }

View File

@@ -37,8 +37,10 @@ where
/// ```ignore /// ```ignore
/// fn as_any(self: Box<Self>) -> Box<dyn Any> { self } /// fn as_any(self: Box<Self>) -> Box<dyn Any> { self }
/// ``` /// ```
#[must_use]
fn as_any(self: Box<Self>) -> Box<dyn Any>; fn as_any(self: Box<Self>) -> Box<dyn Any>;
/// See [Atomic::as_any], exactly the same but for references /// See [Atomic::as_any], exactly the same but for references
#[must_use]
fn as_any_ref(&self) -> &dyn Any; fn as_any_ref(&self) -> &dyn Any;
/// Attempt to normalize this value. If it wraps a value, this should report /// Attempt to normalize this value. If it wraps a value, this should report
@@ -47,6 +49,7 @@ where
fn run(self: Box<Self>, ctx: Context) -> AtomicResult; fn run(self: Box<Self>, ctx: Context) -> AtomicResult;
/// Wrap the atom in a clause to be placed in an [AtomicResult]. /// Wrap the atom in a clause to be placed in an [AtomicResult].
#[must_use]
fn atom_cls(self) -> Clause fn atom_cls(self) -> Clause
where where
Self: Sized, Self: Sized,
@@ -55,6 +58,7 @@ where
} }
/// Wrap the atom in a new expression instance to be placed in a tree /// Wrap the atom in a new expression instance to be placed in a tree
#[must_use]
fn atom_exi(self) -> ExprInst fn atom_exi(self) -> ExprInst
where where
Self: Sized, Self: Sized,
@@ -73,10 +77,12 @@ where
pub struct Atom(pub Box<dyn Atomic>); pub struct Atom(pub Box<dyn Atomic>);
impl Atom { impl Atom {
/// Wrap an [Atomic] in a type-erased box /// Wrap an [Atomic] in a type-erased box
#[must_use]
pub fn new<T: 'static + Atomic>(data: T) -> Self { pub fn new<T: 'static + Atomic>(data: T) -> Self {
Self(Box::new(data) as Box<dyn Atomic>) Self(Box::new(data) as Box<dyn Atomic>)
} }
/// Get the contained data /// Get the contained data
#[must_use]
pub fn data(&self) -> &dyn Atomic { self.0.as_ref() as &dyn Atomic } pub fn data(&self) -> &dyn Atomic { self.0.as_ref() as &dyn Atomic }
/// Attempt to downcast contained data to a specific type /// Attempt to downcast contained data to a specific type
pub fn try_cast<T: Atomic>(self) -> Result<T, Self> { pub fn try_cast<T: Atomic>(self) -> Result<T, Self> {
@@ -86,8 +92,10 @@ impl Atom {
} }
} }
/// Test the type of the contained data without downcasting /// Test the type of the contained data without downcasting
#[must_use]
pub fn is<T: 'static>(&self) -> bool { self.data().as_any_ref().is::<T>() } pub fn is<T: 'static>(&self) -> bool { self.data().as_any_ref().is::<T>() }
/// Downcast contained data, panic if it isn't the specified type /// Downcast contained data, panic if it isn't the specified type
#[must_use]
pub fn cast<T: 'static>(self) -> T { pub fn cast<T: 'static>(self) -> T {
*self.0.as_any().downcast().expect("Type mismatch on Atom::cast") *self.0.as_any().downcast().expect("Type mismatch on Atom::cast")
} }

View File

@@ -25,6 +25,7 @@ struct CPSFn<T: CPSPayload> {
pub payload: T, pub payload: T,
} }
impl<T: CPSPayload> CPSFn<T> { impl<T: CPSPayload> CPSFn<T> {
#[must_use]
fn new(argc: usize, payload: T) -> Self { fn new(argc: usize, payload: T) -> Self {
debug_assert!( debug_assert!(
argc > 0, argc > 0,
@@ -55,37 +56,25 @@ pub struct CPSBox<T: CPSPayload> {
pub continuations: Vec<ExprInst>, pub continuations: Vec<ExprInst>,
} }
impl<T: CPSPayload> CPSBox<T> { impl<T: CPSPayload> CPSBox<T> {
/// Assert that the command was instantiated with the correct number of
/// possible continuations. This is decided by the native bindings, not user
/// code, therefore this error may be uncovered by usercode but can never be
/// produced at will.
pub fn assert_count(&self, expect: usize) {
let real = self.continuations.len();
debug_assert!(
real == expect,
"Tried to read {expect} argument(s) but {real} were provided for {:?}",
self.payload
)
}
/// Unpack the wrapped command and the continuation /// Unpack the wrapped command and the continuation
#[must_use]
pub fn unpack1(self) -> (T, ExprInst) { pub fn unpack1(self) -> (T, ExprInst) {
self.assert_count(1);
let [cont]: [ExprInst; 1] = let [cont]: [ExprInst; 1] =
self.continuations.try_into().expect("size checked"); self.continuations.try_into().expect("size checked");
(self.payload, cont) (self.payload, cont)
} }
/// Unpack the wrapped command and 2 continuations (usually an async and a /// Unpack the wrapped command and 2 continuations (usually an async and a
/// sync) /// sync)
#[must_use]
pub fn unpack2(self) -> (T, ExprInst, ExprInst) { pub fn unpack2(self) -> (T, ExprInst, ExprInst) {
self.assert_count(2);
let [c1, c2]: [ExprInst; 2] = let [c1, c2]: [ExprInst; 2] =
self.continuations.try_into().expect("size checked"); self.continuations.try_into().expect("size checked");
(self.payload, c1, c2) (self.payload, c1, c2)
} }
/// Unpack the wrapped command and 3 continuations (usually an async success, /// Unpack the wrapped command and 3 continuations (usually an async success,
/// an async fail and a sync) /// an async fail and a sync)
#[must_use]
pub fn unpack3(self) -> (T, ExprInst, ExprInst, ExprInst) { pub fn unpack3(self) -> (T, ExprInst, ExprInst, ExprInst) {
self.assert_count(3);
let [c1, c2, c3]: [ExprInst; 3] = let [c1, c2, c3]: [ExprInst; 3] =
self.continuations.try_into().expect("size checked"); self.continuations.try_into().expect("size checked");
(self.payload, c1, c2, c3) (self.payload, c1, c2, c3)
@@ -97,6 +86,7 @@ impl<T: CPSPayload> InertAtomic for CPSBox<T> {
} }
/// Like [init_cps] but wrapped in a [ConstTree] for init-time usage /// Like [init_cps] but wrapped in a [ConstTree] for init-time usage
#[must_use]
pub fn const_cps<T: CPSPayload>(argc: usize, payload: T) -> ConstTree { pub fn const_cps<T: CPSPayload>(argc: usize, payload: T) -> ConstTree {
ConstTree::xfn(CPSFn::new(argc, payload)) ConstTree::xfn(CPSFn::new(argc, payload))
} }
@@ -106,6 +96,7 @@ pub fn const_cps<T: CPSPayload>(argc: usize, payload: T) -> ConstTree {
/// ///
/// This function is meant to be used in an external function defined with /// This function is meant to be used in an external function defined with
/// [crate::define_fn]. For usage in a [ConstTree], see [mk_const] /// [crate::define_fn]. For usage in a [ConstTree], see [mk_const]
#[must_use]
pub fn init_cps<T: CPSPayload>(argc: usize, payload: T) -> Clause { pub fn init_cps<T: CPSPayload>(argc: usize, payload: T) -> Clause {
CPSFn::new(argc, payload).xfn_cls() CPSFn::new(argc, payload).xfn_cls()
} }

View File

@@ -16,6 +16,7 @@ pub type XfnResult = Result<Clause, Rc<dyn ExternError>>;
/// Errors produced by external code /// Errors produced by external code
pub trait ExternError: Display { pub trait ExternError: Display {
/// Convert into trait object /// Convert into trait object
#[must_use]
fn into_extern(self) -> Rc<dyn ExternError> fn into_extern(self) -> Rc<dyn ExternError>
where where
Self: 'static + Sized, Self: 'static + Sized,
@@ -37,6 +38,7 @@ impl Error for dyn ExternError {}
/// these are also external functions. /// these are also external functions.
pub trait ExternFn: DynClone { pub trait ExternFn: DynClone {
/// Display name of the function /// Display name of the function
#[must_use]
fn name(&self) -> &str; fn name(&self) -> &str;
/// Combine the function with an argument to produce a new clause /// Combine the function with an argument to produce a new clause
fn apply(self: Box<Self>, arg: ExprInst, ctx: Context) -> XfnResult; fn apply(self: Box<Self>, arg: ExprInst, ctx: Context) -> XfnResult;
@@ -45,6 +47,7 @@ pub trait ExternFn: DynClone {
self.name().hash(&mut state) self.name().hash(&mut state)
} }
/// Wrap this function in a clause to be placed in an [AtomicResult]. /// Wrap this function in a clause to be placed in an [AtomicResult].
#[must_use]
fn xfn_cls(self) -> Clause fn xfn_cls(self) -> Clause
where where
Self: Sized + 'static, Self: Sized + 'static,

View File

@@ -19,6 +19,7 @@ use crate::Primitive;
/// provided in argument lists. /// provided in argument lists.
pub trait InertAtomic: Debug + Clone + 'static { pub trait InertAtomic: Debug + Clone + 'static {
/// Typename to be shown in the error when a conversion from [ExprInst] fails /// Typename to be shown in the error when a conversion from [ExprInst] fails
#[must_use]
fn type_str() -> &'static str; fn type_str() -> &'static str;
/// Proxies to [Responder] so that you don't have to implement it manually if /// Proxies to [Responder] so that you don't have to implement it manually if
/// you need it, but behaves exactly as the default implementation. /// you need it, but behaves exactly as the default implementation.

View File

@@ -5,9 +5,7 @@
mod monotype; mod monotype;
mod multitype; mod multitype;
mod token; mod token;
// mod traits;
pub use monotype::TypedInterner; pub use monotype::TypedInterner;
pub use multitype::Interner; pub use multitype::Interner;
pub use token::Tok; pub use token::Tok;
// pub use traits::{DisplayBundle, InternedDisplay, InternedInto};

View File

@@ -15,11 +15,13 @@ pub struct TypedInterner<T: 'static + Eq + Hash + Clone> {
} }
impl<T: Eq + Hash + Clone> TypedInterner<T> { impl<T: Eq + Hash + Clone> TypedInterner<T> {
/// Create a fresh interner instance /// Create a fresh interner instance
#[must_use]
pub fn new() -> Rc<Self> { pub fn new() -> Rc<Self> {
Rc::new(Self { tokens: RefCell::new(HashMap::new()) }) Rc::new(Self { tokens: RefCell::new(HashMap::new()) })
} }
/// Intern an object, returning a token /// Intern an object, returning a token
#[must_use]
pub fn i<Q: ?Sized + Eq + Hash + ToOwned<Owned = T>>( pub fn i<Q: ?Sized + Eq + Hash + ToOwned<Owned = T>>(
self: &Rc<Self>, self: &Rc<Self>,
q: &Q, q: &Q,
@@ -42,6 +44,7 @@ impl<T: Eq + Hash + Clone> TypedInterner<T> {
} }
/// Helper function to compute hashes outside a hashmap /// Helper function to compute hashes outside a hashmap
#[must_use]
fn compute_hash( fn compute_hash(
hash_builder: &impl BuildHasher, hash_builder: &impl BuildHasher,
key: &(impl Hash + ?Sized), key: &(impl Hash + ?Sized),

View File

@@ -8,7 +8,6 @@ use hashbrown::HashMap;
use super::monotype::TypedInterner; use super::monotype::TypedInterner;
use super::token::Tok; use super::token::Tok;
// use super::InternedDisplay;
/// A collection of interners based on their type. Allows to intern any object /// A collection of interners based on their type. Allows to intern any object
/// that implements [ToOwned]. Objects of the same type are stored together in a /// that implements [ToOwned]. Objects of the same type are stored together in a
@@ -18,9 +17,11 @@ pub struct Interner {
} }
impl Interner { impl Interner {
/// Create a new interner /// Create a new interner
#[must_use]
pub fn new() -> Self { Self { interners: RefCell::new(HashMap::new()) } } pub fn new() -> Self { Self { interners: RefCell::new(HashMap::new()) } }
/// Intern something /// Intern something
#[must_use]
pub fn i<Q: ?Sized + Eq + Hash + ToOwned>(&self, q: &Q) -> Tok<Q::Owned> pub fn i<Q: ?Sized + Eq + Hash + ToOwned>(&self, q: &Q) -> Tok<Q::Owned>
where where
Q::Owned: 'static + Eq + Hash + Clone + Borrow<Q>, Q::Owned: 'static + Eq + Hash + Clone + Borrow<Q>,
@@ -31,32 +32,10 @@ impl Interner {
} }
/// Fully resolve a list of interned things. /// Fully resolve a list of interned things.
#[must_use]
pub fn extern_all<T: 'static + Eq + Hash + Clone>(s: &[Tok<T>]) -> Vec<T> { pub fn extern_all<T: 'static + Eq + Hash + Clone>(s: &[Tok<T>]) -> Vec<T> {
s.iter().map(|t| (**t).clone()).collect() s.iter().map(|t| (**t).clone()).collect()
} }
// /// A variant of `unwrap` using [InternedDisplay] to circumvent `unwrap`'s
// /// dependency on [Debug]. For clarity, [expect] should be preferred.
// pub fn unwrap<T, E: InternedDisplay>(&self, result: Result<T, E>) -> T {
// result.unwrap_or_else(|e| {
// println!("Unwrapped Error: {}", e.bundle(self));
// panic!("Unwrapped an error");
// })
// }
// /// A variant of `expect` using [InternedDisplay] to circumvent `expect`'s
// /// dependency on [Debug].
// pub fn expect<T, E: InternedDisplay>(
// &self,
// result: Result<T, E>,
// msg: &str,
// ) -> T {
// result.unwrap_or_else(|e| {
// println!("Expectation failed: {msg}");
// println!("Error: {}", e.bundle(self));
// panic!("Expected an error");
// })
// }
} }
impl Default for Interner { impl Default for Interner {
@@ -64,6 +43,7 @@ impl Default for Interner {
} }
/// Get or create an interner for a given type. /// Get or create an interner for a given type.
#[must_use]
fn get_interner<T: 'static + Eq + Hash + Clone>( fn get_interner<T: 'static + Eq + Hash + Clone>(
interners: &mut RefMut<HashMap<TypeId, Rc<dyn Any>>>, interners: &mut RefMut<HashMap<TypeId, Rc<dyn Any>>>,
) -> Rc<TypedInterner<T>> { ) -> Rc<TypedInterner<T>> {

View File

@@ -18,15 +18,18 @@ pub struct Tok<T: Eq + Hash + Clone + 'static> {
} }
impl<T: Eq + Hash + Clone + 'static> Tok<T> { impl<T: Eq + Hash + Clone + 'static> Tok<T> {
/// Create a new token. Used exclusively by the interner /// Create a new token. Used exclusively by the interner
#[must_use]
pub(crate) fn new(data: Rc<T>, interner: Weak<TypedInterner<T>>) -> Self { pub(crate) fn new(data: Rc<T>, interner: Weak<TypedInterner<T>>) -> Self {
Self { data, interner } Self { data, interner }
} }
/// Take the ID number out of a token /// Take the ID number out of a token
#[must_use]
pub fn id(&self) -> NonZeroUsize { pub fn id(&self) -> NonZeroUsize {
((self.data.as_ref() as *const T as usize).try_into()) ((self.data.as_ref() as *const T as usize).try_into())
.expect("Pointer can always be cast to nonzero") .expect("Pointer can always be cast to nonzero")
} }
/// Cast into usize /// Cast into usize
#[must_use]
pub fn usize(&self) -> usize { self.id().into() } pub fn usize(&self) -> usize { self.id().into() }
/// ///
pub fn assert_comparable(&self, other: &Self) { pub fn assert_comparable(&self, other: &Self) {

View File

@@ -1,69 +0,0 @@
use core::fmt::{self, Display, Formatter};
use core::ops::Deref;
use std::rc::Rc;
use crate::interner::Interner;
/// A variant of [std::fmt::Display] for objects that contain interned
/// strings and therefore can only be stringified in the presence of a
/// string interner
///
/// The functions defined here are suffixed to distinguish them from
/// the ones in Display and ToString respectively, because Rust can't
/// identify functions based on arity
pub trait InternedDisplay {
/// formats the value using the given formatter and string interner
fn fmt_i(
&self,
f: &mut std::fmt::Formatter<'_>,
i: &Interner,
) -> std::fmt::Result;
/// Converts the value to a string to be displayed
fn to_string_i(&self, i: &Interner) -> String {
self.bundle(i).to_string()
}
/// Combine with an interner to implement [Display]
fn bundle<'a>(&'a self, interner: &'a Interner) -> DisplayBundle<'a, Self> {
DisplayBundle { interner, data: self }
}
}
// Special loophole for Rc<dyn ProjectError>
impl<T: ?Sized> InternedDisplay for Rc<T>
where
T: InternedDisplay,
{
fn fmt_i(&self, f: &mut Formatter<'_>, i: &Interner) -> fmt::Result {
self.deref().fmt_i(f, i)
}
}
/// A reference to an [InternedDisplay] type and an [Interner] tied together
/// to implement [Display]
pub struct DisplayBundle<'a, T: InternedDisplay + ?Sized> {
interner: &'a Interner,
data: &'a T,
}
impl<'a, T: InternedDisplay + ?Sized> Display for DisplayBundle<'a, T> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.data.fmt_i(f, self.interner)
}
}
/// Conversions that are possible in the presence of an interner
///
/// Essentially, this allows to define abstractions over interned and
/// non-interned versions of a type and convert between them
pub trait InternedInto<U> {
/// Execute the conversion
fn into_i(self, i: &Interner) -> U;
}
impl<T: Into<U>, U> InternedInto<U> for T {
fn into_i(self, _i: &Interner) -> U {
self.into()
}
}

View File

@@ -48,6 +48,7 @@ fn map_at<E>(
/// Replace the [Clause::LambdaArg] placeholders at the ends of the [PathSet] /// Replace the [Clause::LambdaArg] placeholders at the ends of the [PathSet]
/// with the value in the body. Note that a path may point to multiple /// with the value in the body. Note that a path may point to multiple
/// placeholders. /// placeholders.
#[must_use]
fn substitute(paths: &PathSet, value: Clause, body: ExprInst) -> ExprInst { fn substitute(paths: &PathSet, value: Clause, body: ExprInst) -> ExprInst {
let PathSet { steps, next } = paths; let PathSet { steps, next } = paths;
unwrap_always(map_at(steps, body, &mut |checkpoint| -> Always<Clause> { unwrap_always(map_at(steps, body, &mut |checkpoint| -> Always<Clause> {

View File

@@ -21,6 +21,7 @@ pub struct HandlerTable<'a> {
} }
impl<'a> HandlerTable<'a> { impl<'a> HandlerTable<'a> {
/// Create a new [HandlerTable] /// Create a new [HandlerTable]
#[must_use]
pub fn new() -> Self { Self { handlers: HashMap::new() } } pub fn new() -> Self { Self { handlers: HashMap::new() } }
/// Add a handler function to interpret a type of atom and decide what happens /// Add a handler function to interpret a type of atom and decide what happens
@@ -46,6 +47,7 @@ impl<'a> HandlerTable<'a> {
} }
/// Combine two non-overlapping handler sets /// Combine two non-overlapping handler sets
#[must_use]
pub fn combine(mut self, other: Self) -> Self { pub fn combine(mut self, other: Self) -> Self {
for (key, value) in other.handlers { for (key, value) in other.handlers {
let prev = self.handlers.insert(key, value); let prev = self.handlers.insert(key, value);

View File

@@ -4,6 +4,7 @@ pub use chumsky::{self, Parser};
use super::decls::SimpleParser; use super::decls::SimpleParser;
/// Parses Lua-style comments /// Parses Lua-style comments
#[must_use]
pub fn comment_parser() -> impl SimpleParser<char, String> { pub fn comment_parser() -> impl SimpleParser<char, String> {
choice(( choice((
just("--[").ignore_then(take_until(just("]--").ignored())), just("--[").ignore_then(take_until(just("]--").ignored())),

View File

@@ -8,8 +8,11 @@ use crate::{Tok, VName};
/// Hiding type parameters in associated types allows for simpler /// Hiding type parameters in associated types allows for simpler
/// parser definitions /// parser definitions
pub trait Context: Clone { pub trait Context: Clone {
#[must_use]
fn ops(&self) -> &[Tok<String>]; fn ops(&self) -> &[Tok<String>];
#[must_use]
fn file(&self) -> Rc<VName>; fn file(&self) -> Rc<VName>;
#[must_use]
fn interner(&self) -> &Interner; fn interner(&self) -> &Interner;
} }

View File

@@ -25,10 +25,12 @@ pub struct Entry {
} }
impl Entry { impl Entry {
/// Checks if the lexeme is a comment or line break /// Checks if the lexeme is a comment or line break
#[must_use]
pub fn is_filler(&self) -> bool { pub fn is_filler(&self) -> bool {
matches!(self.lexeme, Lexeme::Comment(_) | Lexeme::BR) matches!(self.lexeme, Lexeme::Comment(_) | Lexeme::BR)
} }
#[must_use]
pub fn is_keyword(&self) -> bool { pub fn is_keyword(&self) -> bool {
matches!( matches!(
self.lexeme, self.lexeme,
@@ -40,12 +42,15 @@ impl Entry {
) )
} }
#[must_use]
pub fn location(&self) -> Location { self.location.clone() } pub fn location(&self) -> Location { self.location.clone() }
#[must_use]
pub fn range(&self) -> Range<usize> { pub fn range(&self) -> Range<usize> {
self.location.range().expect("An Entry can only have a known location") self.location.range().expect("An Entry can only have a known location")
} }
#[must_use]
pub fn file(&self) -> Rc<VName> { pub fn file(&self) -> Rc<VName> {
self.location.file().expect("An Entry can only have a range location") self.location.file().expect("An Entry can only have a range location")
} }
@@ -57,30 +62,6 @@ impl Display for Entry {
} }
} }
// impl From<Entry> for (Lexeme, Range<usize>) {
// fn from(ent: Entry) -> Self {
// (ent.lexeme.clone(), ent.range())
// }
// }
// impl Span for Entry {
// type Context = (Lexeme, Rc<Vec<String>>);
// type Offset = usize;
// fn context(&self) -> Self::Context {
// (self.lexeme.clone(), self.file())
// }
// fn start(&self) -> Self::Offset {
// self.range().start()
// }
// fn end(&self) -> Self::Offset {
// self.range().end()
// }
// fn new((lexeme, file): Self::Context, range: Range<Self::Offset>) -> Self {
// Self { lexeme, location: Location::Range { file, range } }
// }
// }
impl AsRef<Location> for Entry { impl AsRef<Location> for Entry {
fn as_ref(&self) -> &Location { &self.location } fn as_ref(&self) -> &Location { &self.location }
} }
@@ -159,12 +140,14 @@ impl Display for Lexeme {
} }
impl Lexeme { impl Lexeme {
#[must_use]
pub fn rule(prio: impl Into<f64>) -> Self { pub fn rule(prio: impl Into<f64>) -> Self {
Lexeme::Arrow( Lexeme::Arrow(
NotNan::new(prio.into()).expect("Rule priority cannot be NaN"), NotNan::new(prio.into()).expect("Rule priority cannot be NaN"),
) )
} }
#[must_use]
pub fn parser<E: chumsky::Error<Entry>>( pub fn parser<E: chumsky::Error<Entry>>(
self, self,
) -> impl Parser<Entry, Entry, Error = E> + Clone { ) -> impl Parser<Entry, Entry, Error = E> + Clone {
@@ -181,10 +164,12 @@ impl Display for LexedText {
} }
} }
#[must_use]
fn paren_parser(lp: char, rp: char) -> impl SimpleParser<char, Lexeme> { fn paren_parser(lp: char, rp: char) -> impl SimpleParser<char, Lexeme> {
just(lp).to(Lexeme::LP(lp)).or(just(rp).to(Lexeme::RP(lp))) just(lp).to(Lexeme::LP(lp)).or(just(rp).to(Lexeme::RP(lp)))
} }
#[must_use]
pub fn literal_parser<'a>( pub fn literal_parser<'a>(
ctx: impl Context + 'a, ctx: impl Context + 'a,
) -> impl SimpleParser<char, Literal> + 'a { ) -> impl SimpleParser<char, Literal> + 'a {
@@ -199,6 +184,7 @@ pub fn literal_parser<'a>(
pub static BASE_OPS: &[&str] = &[",", ".", "..", "...", "*"]; pub static BASE_OPS: &[&str] = &[",", ".", "..", "...", "*"];
#[must_use]
pub fn lexer<'a>( pub fn lexer<'a>(
ctx: impl Context + 'a, ctx: impl Context + 'a,
source: Rc<String>, source: Rc<String>,

View File

@@ -16,19 +16,23 @@ struct Subresult {
location: Location, location: Location,
} }
impl Subresult { impl Subresult {
#[must_use]
fn new_glob(location: Location) -> Self { fn new_glob(location: Location) -> Self {
Self { glob: true, deque: VecDeque::new(), location } Self { glob: true, deque: VecDeque::new(), location }
} }
#[must_use]
fn new_named(name: Tok<String>, location: Location) -> Self { fn new_named(name: Tok<String>, location: Location) -> Self {
Self { location, glob: false, deque: VecDeque::from([name]) } Self { location, glob: false, deque: VecDeque::from([name]) }
} }
#[must_use]
fn push_front(mut self, name: Tok<String>) -> Self { fn push_front(mut self, name: Tok<String>) -> Self {
self.deque.push_front(name); self.deque.push_front(name);
self self
} }
#[must_use]
fn finalize(self) -> Import { fn finalize(self) -> Import {
let Self { mut deque, glob, location } = self; let Self { mut deque, glob, location } = self;
debug_assert!(glob || !deque.is_empty(), "The constructors forbid this"); debug_assert!(glob || !deque.is_empty(), "The constructors forbid this");

View File

@@ -42,6 +42,7 @@ pub static NOT_NAME_CHAR: &[char] = &[
/// TODO: `.` could possibly be parsed as an operator in some contexts. /// TODO: `.` could possibly be parsed as an operator in some contexts.
/// This operator is very common in maths so it's worth a try. /// This operator is very common in maths so it's worth a try.
/// Investigate. /// Investigate.
#[must_use]
pub fn anyop_parser<'a>() -> impl SimpleParser<char, String> + 'a { pub fn anyop_parser<'a>() -> impl SimpleParser<char, String> + 'a {
filter(move |c| { filter(move |c| {
!NOT_NAME_CHAR.contains(c) !NOT_NAME_CHAR.contains(c)
@@ -57,6 +58,7 @@ pub fn anyop_parser<'a>() -> impl SimpleParser<char, String> + 'a {
/// Parse an operator or name. Failing both, parse everything up to /// Parse an operator or name. Failing both, parse everything up to
/// the next whitespace or blacklisted character as a new operator. /// the next whitespace or blacklisted character as a new operator.
#[must_use]
pub fn name_parser<'a>( pub fn name_parser<'a>(
ops: &[impl AsRef<str> + Clone], ops: &[impl AsRef<str> + Clone],
) -> impl SimpleParser<char, String> + 'a { ) -> impl SimpleParser<char, String> + 'a {

View File

@@ -13,6 +13,7 @@ fn assert_not_digit(base: u32, c: char) {
/// Parse an arbitrarily grouped sequence of digits starting with an underscore. /// Parse an arbitrarily grouped sequence of digits starting with an underscore.
/// ///
/// TODO: this should use separated_by and parse the leading group too /// TODO: this should use separated_by and parse the leading group too
#[must_use]
fn separated_digits_parser(base: u32) -> impl SimpleParser<char, String> { fn separated_digits_parser(base: u32) -> impl SimpleParser<char, String> {
just('_') just('_')
.ignore_then(text::digits(base)) .ignore_then(text::digits(base))
@@ -23,6 +24,7 @@ fn separated_digits_parser(base: u32) -> impl SimpleParser<char, String> {
/// parse a grouped uint /// parse a grouped uint
/// ///
/// Not to be confused with [int_parser] which does a lot more /// Not to be confused with [int_parser] which does a lot more
#[must_use]
fn uint_parser(base: u32) -> impl SimpleParser<char, u64> { fn uint_parser(base: u32) -> impl SimpleParser<char, u64> {
text::int(base).then(separated_digits_parser(base)).map( text::int(base).then(separated_digits_parser(base)).map(
move |(s1, s2): (String, String)| { move |(s1, s2): (String, String)| {
@@ -33,6 +35,7 @@ fn uint_parser(base: u32) -> impl SimpleParser<char, u64> {
/// parse exponent notation, or return 0 as the default exponent. /// parse exponent notation, or return 0 as the default exponent.
/// The exponent is always in decimal. /// The exponent is always in decimal.
#[must_use]
fn pow_parser() -> impl SimpleParser<char, i32> { fn pow_parser() -> impl SimpleParser<char, i32> {
choice(( choice((
just('p').ignore_then(text::int(10)).map(|s: String| s.parse().unwrap()), just('p').ignore_then(text::int(10)).map(|s: String| s.parse().unwrap()),
@@ -68,6 +71,7 @@ fn nat2f(base: u64) -> impl Fn((NotNan<f64>, i32)) -> NotNan<f64> {
} }
/// parse an uint from exponential notation (panics if 'p' is a digit in base) /// parse an uint from exponential notation (panics if 'p' is a digit in base)
#[must_use]
fn pow_uint_parser(base: u32) -> impl SimpleParser<char, u64> { fn pow_uint_parser(base: u32) -> impl SimpleParser<char, u64> {
assert_not_digit(base, 'p'); assert_not_digit(base, 'p');
uint_parser(base).then(pow_parser()).map(nat2u(base.into())) uint_parser(base).then(pow_parser()).map(nat2u(base.into()))
@@ -76,6 +80,7 @@ fn pow_uint_parser(base: u32) -> impl SimpleParser<char, u64> {
/// parse an uint from a base determined by its prefix or lack thereof /// parse an uint from a base determined by its prefix or lack thereof
/// ///
/// Not to be confused with [uint_parser] which is a component of it. /// Not to be confused with [uint_parser] which is a component of it.
#[must_use]
pub fn int_parser() -> impl SimpleParser<char, u64> { pub fn int_parser() -> impl SimpleParser<char, u64> {
choice(( choice((
just("0b").ignore_then(pow_uint_parser(2)), just("0b").ignore_then(pow_uint_parser(2)),
@@ -86,6 +91,7 @@ pub fn int_parser() -> impl SimpleParser<char, u64> {
} }
/// parse a float from dot notation /// parse a float from dot notation
#[must_use]
fn dotted_parser(base: u32) -> impl SimpleParser<char, NotNan<f64>> { fn dotted_parser(base: u32) -> impl SimpleParser<char, NotNan<f64>> {
uint_parser(base) uint_parser(base)
.then( .then(
@@ -107,6 +113,7 @@ fn dotted_parser(base: u32) -> impl SimpleParser<char, NotNan<f64>> {
} }
/// parse a float from dotted and optionally also exponential notation /// parse a float from dotted and optionally also exponential notation
#[must_use]
fn pow_float_parser(base: u32) -> impl SimpleParser<char, NotNan<f64>> { fn pow_float_parser(base: u32) -> impl SimpleParser<char, NotNan<f64>> {
assert_not_digit(base, 'p'); assert_not_digit(base, 'p');
dotted_parser(base).then(pow_parser()).map(nat2f(base.into())) dotted_parser(base).then(pow_parser()).map(nat2f(base.into()))
@@ -114,6 +121,7 @@ fn pow_float_parser(base: u32) -> impl SimpleParser<char, NotNan<f64>> {
/// parse a float with dotted and optionally exponential notation from a base /// parse a float with dotted and optionally exponential notation from a base
/// determined by its prefix /// determined by its prefix
#[must_use]
pub fn float_parser() -> impl SimpleParser<char, NotNan<f64>> { pub fn float_parser() -> impl SimpleParser<char, NotNan<f64>> {
choice(( choice((
just("0b").ignore_then(pow_float_parser(2)), just("0b").ignore_then(pow_float_parser(2)),
@@ -124,6 +132,7 @@ pub fn float_parser() -> impl SimpleParser<char, NotNan<f64>> {
.labelled("float") .labelled("float")
} }
#[must_use]
pub fn print_nat16(num: NotNan<f64>) -> String { pub fn print_nat16(num: NotNan<f64>) -> String {
let exp = num.log(16.0).floor(); let exp = num.log(16.0).floor();
let man = num / 16_f64.powf(exp); let man = num / 16_f64.powf(exp);

View File

@@ -2,6 +2,7 @@ use chumsky::prelude::*;
use super::decls::SimpleParser; use super::decls::SimpleParser;
#[must_use]
pub fn operators_parser<T>( pub fn operators_parser<T>(
f: impl Fn(String) -> T, f: impl Fn(String) -> T,
) -> impl SimpleParser<char, Vec<T>> { ) -> impl SimpleParser<char, Vec<T>> {

View File

@@ -6,6 +6,7 @@ use super::decls::SimpleParser;
use super::number::int_parser; use super::number::int_parser;
use crate::ast::{PHClass, Placeholder}; use crate::ast::{PHClass, Placeholder};
#[must_use]
pub fn placeholder_parser( pub fn placeholder_parser(
ctx: impl Context, ctx: impl Context,
) -> impl SimpleParser<char, Placeholder> { ) -> impl SimpleParser<char, Placeholder> {

View File

@@ -287,6 +287,7 @@ fn vec_to_single(
} }
} }
#[must_use]
pub fn expr_slice_location(v: &[impl AsRef<Location>]) -> Location { pub fn expr_slice_location(v: &[impl AsRef<Location>]) -> Location {
v.first() v.first()
.map(|l| l.as_ref().clone().to(v.last().unwrap().as_ref().clone())) .map(|l| l.as_ref().clone().to(v.last().unwrap().as_ref().clone()))

View File

@@ -5,6 +5,7 @@ use crate::Location;
/// Represents a slice which may or may not contain items, and a fallback entry /// Represents a slice which may or may not contain items, and a fallback entry
/// used for error reporting whenever the errant stream is empty. /// used for error reporting whenever the errant stream is empty.
#[must_use = "streams represent segments of code that must be parsed"]
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
pub struct Stream<'a> { pub struct Stream<'a> {
pub fallback: &'a Entry, pub fallback: &'a Entry,
@@ -42,6 +43,7 @@ impl<'a> Stream<'a> {
}) })
} }
#[must_use]
pub fn location(self) -> Location { pub fn location(self) -> Location {
self.data.first().map_or_else( self.data.first().map_or_else(
|| self.fallback.location(), || self.fallback.location(),
@@ -98,12 +100,6 @@ impl<'a> Stream<'a> {
} }
} }
// impl<'a> From<(&'a Entry, &'a [Entry])> for Stream<'a> {
// fn from((fallback, data): (&'a Entry, &'a [Entry])) -> Self {
// Self::new(fallback, data)
// }
// }
pub fn skip_parenthesized<'a>( pub fn skip_parenthesized<'a>(
it: impl Iterator<Item = &'a Entry>, it: impl Iterator<Item = &'a Entry>,
) -> impl Iterator<Item = (usize, &'a Entry)> { ) -> impl Iterator<Item = (usize, &'a Entry)> {

View File

@@ -4,6 +4,7 @@ use chumsky::{self, Parser};
use super::decls::SimpleParser; use super::decls::SimpleParser;
/// Parses a text character that is not the specified delimiter /// Parses a text character that is not the specified delimiter
#[must_use]
fn text_parser(delim: char) -> impl SimpleParser<char, char> { fn text_parser(delim: char) -> impl SimpleParser<char, char> {
// Copied directly from Chumsky's JSON example. // Copied directly from Chumsky's JSON example.
let escape = just('\\').ignore_then( let escape = just('\\').ignore_then(
@@ -35,6 +36,7 @@ fn text_parser(delim: char) -> impl SimpleParser<char, char> {
} }
/// Parse a string between double quotes /// Parse a string between double quotes
#[must_use]
pub fn str_parser() -> impl SimpleParser<char, String> { pub fn str_parser() -> impl SimpleParser<char, String> {
just('"') just('"')
.ignore_then( .ignore_then(

View File

@@ -1,59 +0,0 @@
use std::slice;
use chumsky::primitive::Container;
use hashbrown::HashMap;
use crate::representations::project::{ProjectMod, ItemKind, ProjectEntry};
use crate::tree::ModMember;
use crate::utils::{pushed, unwrap_or};
use crate::{ProjectTree, VName, Tok, NameLike};
use super::walk_with_links::{walk_with_links, Target};
pub struct AliasCache {
data: HashMap<Vec<Tok<String>>, Option<Vec<Tok<String>>>>,
}
impl AliasCache {
pub fn new() -> Self {
Self { data: HashMap::new() }
}
/// Finds the absolute nsname corresponding to the given name in the given
/// context, if it's imported. If the name is defined locally, returns None
/// to avoid allocating several vectors for every local variable.
pub fn resolv_name<'a>(
&'a mut self,
root: &ProjectMod<VName>,
location: &[Tok<String>],
name: Tok<String>
) -> Option<&'a [Tok<String>]> {
let full_path = pushed(location, name);
if let Some(result) = self.data.get(&full_path) {
return result.as_deref();
}
let (ent, finalp) = walk_with_links(root, location.iter().cloned())
.expect("This path should be valid");
let m = unwrap_or!{ent => Target::Mod; panic!("Must be a module")};
let result = m.extra.imports_from.get(&name).map(|next| {
self.resolv_name(root, &next, name).unwrap_or(&next)
});
self.data.insert(full_path, result.map(|s| s.to_vec()));
return result;
}
/// Find the absolute target of a
pub fn resolv_vec<'a>(
&'a mut self,
root: &ProjectMod<VName>,
modname: &[Tok<String>],
vname: &[Tok<String>],
) -> Option<&'a [Tok<String>]> {
let (name, ns) = vname.split_last().expect("name cannot be empty");
if ns.is_empty() {
self.resolv_name(modname, name)
} else {
let origin = self.resolv_vec(modname, ns)?;
self.resolv_name(origin, name)
}
}
}

View File

@@ -1,4 +1,3 @@
// mod alias_cache;
mod resolve_aliases; mod resolve_aliases;
mod walk_with_links; mod walk_with_links;

View File

@@ -11,6 +11,7 @@ use crate::tree::{ModEntry, ModMember, Module};
use crate::utils::pure_push::pushed; use crate::utils::pure_push::pushed;
use crate::{Interner, ProjectTree, Tok, VName}; use crate::{Interner, ProjectTree, Tok, VName};
#[must_use]
fn resolve_aliases_rec( fn resolve_aliases_rec(
root: &ProjectMod<VName>, root: &ProjectMod<VName>,
module: &ProjectMod<VName>, module: &ProjectMod<VName>,
@@ -26,7 +27,6 @@ fn resolve_aliases_rec(
let full_name = (module.extra.path.iter()).chain(n.iter()).cloned(); let full_name = (module.extra.path.iter()).chain(n.iter()).cloned();
match walk_with_links(root, full_name, false) { match walk_with_links(root, full_name, false) {
Ok(rep) => Some(rep.abs_path), Ok(rep) => Some(rep.abs_path),
// Ok(_) => None,
Err(e) => { Err(e) => {
let leftovers = e.tail.collect::<Vec<_>>(); let leftovers = e.tail.collect::<Vec<_>>();
if !leftovers.is_empty() { if !leftovers.is_empty() {
@@ -87,6 +87,7 @@ fn resolve_aliases_rec(
} }
} }
#[must_use]
pub fn resolve_aliases( pub fn resolve_aliases(
project: ProjectTree<VName>, project: ProjectTree<VName>,
updated: &impl Fn(&[Tok<String>]) -> bool, updated: &impl Fn(&[Tok<String>]) -> bool,

View File

@@ -12,6 +12,7 @@ pub enum Target<'a, N: NameLike> {
Leaf(&'a ProjectItem<N>), Leaf(&'a ProjectItem<N>),
} }
#[must_use = "this is the sole product of this function"]
pub struct WalkReport<'a, N: NameLike> { pub struct WalkReport<'a, N: NameLike> {
pub target: Target<'a, N>, pub target: Target<'a, N>,
pub abs_path: VName, pub abs_path: VName,

View File

@@ -87,6 +87,7 @@ pub fn load_file(root: &Path, path: &[Tok<String>]) -> IOResult {
} }
/// Generates a cached file loader for a directory /// Generates a cached file loader for a directory
#[must_use]
pub fn mk_dir_cache(root: PathBuf) -> Cache<'static, VName, IOResult> { pub fn mk_dir_cache(root: PathBuf) -> Cache<'static, VName, IOResult> {
Cache::new(move |vname: VName, _this| load_file(&root, &vname)) Cache::new(move |vname: VName, _this| load_file(&root, &vname))
} }
@@ -125,6 +126,7 @@ pub fn load_embed<T: 'static + RustEmbed>(path: &str, ext: &str) -> IOResult {
} }
/// Generates a cached file loader for a [RustEmbed] /// Generates a cached file loader for a [RustEmbed]
#[must_use]
pub fn mk_embed_cache<T: 'static + RustEmbed>( pub fn mk_embed_cache<T: 'static + RustEmbed>(
ext: &str, ext: &str,
) -> Cache<'_, Vec<Stok>, IOResult> { ) -> Cache<'_, Vec<Stok>, IOResult> {
@@ -136,6 +138,7 @@ pub fn mk_embed_cache<T: 'static + RustEmbed>(
/// Load all files from an embed and convert them into a map usable in a /// Load all files from an embed and convert them into a map usable in a
/// [System] /// [System]
#[must_use]
pub fn embed_to_map<T: 'static + RustEmbed>( pub fn embed_to_map<T: 'static + RustEmbed>(
suffix: &str, suffix: &str,
i: &Interner, i: &Interner,

View File

@@ -1,55 +0,0 @@
use std::hash::Hash;
use hashbrown::{HashMap, HashSet};
use crate::{interner::Tok, VName};
#[derive(Clone, Debug, Default)]
pub struct AliasMap {
pub targets: HashMap<VName, VName>,
pub aliases: HashMap<VName, HashSet<VName>>,
}
impl AliasMap {
pub fn new() -> Self {
Self::default()
}
pub fn link(&mut self, alias: VName, target: VName) {
let prev = self.targets.insert(alias.clone(), target.clone());
debug_assert!(prev.is_none(), "Alias already has a target");
multimap_entry(&mut self.aliases, &target).insert(alias.clone());
// Remove aliases of the alias
if let Some(alts) = self.aliases.remove(&alias) {
for alt in alts {
// Assert that this step has always been done in the past
debug_assert!(
self.aliases.get(&alt).map(HashSet::is_empty).unwrap_or(true),
"Alias set of alias not empty"
);
let alt_target = self.targets.insert(alt.clone(), target.clone());
debug_assert!(
alt_target.as_ref() == Some(&alias),
"Name not target of its own alias"
);
multimap_entry(&mut self.aliases, &alias).insert(alt);
}
}
}
pub fn resolve(&self, alias: &[Tok<String>]) -> Option<&VName> {
self.targets.get(alias)
}
}
/// find or create the set belonging to the given key in the given
/// map-to-set (aka. multimap)
fn multimap_entry<'a, K: Eq + Hash + Clone, V>(
map: &'a mut HashMap<K, HashSet<V>>,
key: &'_ K,
) -> &'a mut HashSet<V> {
map
.raw_entry_mut()
.from_key(key)
.or_insert_with(|| (key.clone(), HashSet::new()))
.1
}

View File

@@ -1,86 +0,0 @@
use super::alias_map::AliasMap;
use super::decls::{InjectedAsFn, UpdatedFn};
use crate::ast::{Expr, Rule};
use crate::interner::Tok;
use crate::representations::project::{ItemKind, ProjectMod};
use crate::representations::tree::ModMember;
use crate::representations::VName;
use crate::utils::Substack;
fn resolve_rec(
namespace: &[Tok<String>],
alias_map: &AliasMap,
) -> Option<VName> {
if let Some(alias) = alias_map.resolve(namespace) {
Some(alias.clone())
} else if let Some((foot, body)) = namespace.split_last() {
let mut new_beginning = resolve_rec(body, alias_map)?;
new_beginning.push(foot.clone());
Some(new_beginning)
} else {
None
}
}
fn resolve(
namespace: &[Tok<String>],
alias_map: &AliasMap,
injected_as: &impl InjectedAsFn,
) -> Option<VName> {
injected_as(namespace).or_else(|| {
let next_v = resolve_rec(namespace, alias_map)?;
Some(injected_as(&next_v).unwrap_or(next_v))
})
}
fn process_expr(
expr: &Expr<VName>,
alias_map: &AliasMap,
injected_as: &impl InjectedAsFn,
) -> Expr<VName> {
expr
.map_names(&|n| resolve(n, alias_map, injected_as))
.unwrap_or_else(|| expr.clone())
}
/// Replace all aliases with the name they're originally defined as
fn apply_aliases_rec(
path: Substack<Tok<String>>,
module: &mut ProjectMod<VName>,
alias_map: &AliasMap,
injected_as: &impl InjectedAsFn,
updated: &impl UpdatedFn,
) {
for (name, entry) in module.entries.iter_mut() {
match &mut entry.member {
ModMember::Sub(sub) => {
let subpath = path.push(name.clone());
apply_aliases_rec(subpath, sub, alias_map, injected_as, updated)
},
ModMember::Item(it) => match &mut it.kind {
ItemKind::None => (),
ItemKind::Const(expr) =>
*expr = process_expr(expr, alias_map, injected_as),
ItemKind::Alias(name) =>
if let Some(alt) = alias_map.resolve(&name) {
*name = alt.clone()
},
},
_ => (),
}
}
for Rule { pattern, prio, template } in module.extra.rules.iter_mut() {
for expr in pattern.iter_mut().chain(template.iter_mut()) {
*expr = process_expr(expr, alias_map, injected_as)
}
}
}
pub fn apply_aliases(
module: &mut ProjectMod<VName>,
alias_map: &AliasMap,
injected_as: &impl InjectedAsFn,
updated: &impl UpdatedFn,
) {
apply_aliases_rec(Substack::Bottom, module, alias_map, injected_as, updated)
}

View File

@@ -1,47 +0,0 @@
use super::alias_map::AliasMap;
use super::decls::UpdatedFn;
use crate::error::ProjectResult;
use crate::interner::Tok;
use crate::representations::project::{ProjectMod, ProjectTree};
use crate::representations::tree::ModMember;
use crate::representations::VName;
use crate::utils::{pushed, unwrap_or};
/// Populate target and alias maps from the module tree recursively
fn collect_aliases_rec(
path: Vec<Tok<String>>,
module: &ProjectMod<VName>,
project: &ProjectTree<VName>,
alias_map: &mut AliasMap,
updated: &impl UpdatedFn,
) -> ProjectResult<()> {
// Assume injected module has been alias-resolved
if !updated(&path) {
return Ok(());
};
for (name, target_sym_v) in module.extra.imports_from.iter() {
let sym_path_v = pushed(&path, name.clone());
alias_map.link(sym_path_v, target_sym_v.clone());
}
for (name, entry) in module.entries.iter() {
let submodule = unwrap_or!(&entry.member => ModMember::Sub; continue);
collect_aliases_rec(
pushed(&path, name.clone()),
submodule,
project,
alias_map,
updated,
)?
}
Ok(())
}
/// Populate target and alias maps from the module tree
pub fn collect_aliases(
module: &ProjectMod<VName>,
project: &ProjectTree<VName>,
alias_map: &mut AliasMap,
updated: &impl UpdatedFn,
) -> ProjectResult<()> {
collect_aliases_rec(Vec::new(), module, project, alias_map, updated)
}

View File

@@ -1,8 +0,0 @@
use trait_set::trait_set;
use crate::{interner::Tok, VName};
trait_set! {
pub trait InjectedAsFn = Fn(&[Tok<String>]) -> Option<VName>;
pub trait UpdatedFn = Fn(&[Tok<String>]) -> bool;
}

View File

@@ -1,8 +0,0 @@
mod alias_map;
mod apply_aliases;
mod collect_aliases;
mod decls;
mod resolve_imports;
mod alias_cache;
pub use resolve_imports::resolve_imports;

View File

@@ -1,22 +0,0 @@
use super::alias_cache::AliasCache;
use super::alias_map::AliasMap;
use super::apply_aliases::apply_aliases;
use super::collect_aliases::collect_aliases;
use super::decls::{InjectedAsFn, UpdatedFn};
use crate::error::ProjectResult;
use crate::representations::project::ProjectTree;
use crate::representations::VName;
/// Follow import chains to locate the original name of all tokens, then
/// replace these aliases with the original names throughout the tree
pub fn resolve_imports(
mut project: ProjectTree<VName>,
injected_as: &impl InjectedAsFn,
updated: &impl UpdatedFn,
) -> ProjectResult<ProjectTree<VName>> {
let mut cache = AliasCache::new(&project);
// let mut map = AliasMap::new();
// collect_aliases(&project.0, &project, &mut map, updated)?;
// apply_aliases(&mut project.0, &map, injected_as, updated);
Ok(project)
}

View File

@@ -1,7 +1,6 @@
//! Loading Orchid modules from source //! Loading Orchid modules from source
pub mod file_loader; pub mod file_loader;
mod import_abs_path; mod import_abs_path;
// mod import_resolution;
mod dealias; mod dealias;
mod parse_layer; mod parse_layer;
mod project_tree; mod project_tree;

View File

@@ -16,6 +16,7 @@ use crate::utils::get_or::get_or_default;
use crate::utils::pure_push::pushed_ref; use crate::utils::pure_push::pushed_ref;
use crate::{Tok, VName}; use crate::{Tok, VName};
#[must_use = "A submodule may not be integrated into the tree"]
pub struct TreeReport { pub struct TreeReport {
pub entries: HashMap<Tok<String>, ProjectEntry<VName>>, pub entries: HashMap<Tok<String>, ProjectEntry<VName>>,
pub rules: Vec<Rule<VName>>, pub rules: Vec<Rule<VName>>,

View File

@@ -32,6 +32,7 @@ pub struct Expr<N: NameLike> {
impl<N: NameLike> Expr<N> { impl<N: NameLike> Expr<N> {
/// Process all names with the given mapper. /// Process all names with the given mapper.
/// Return a new object if anything was processed /// Return a new object if anything was processed
#[must_use]
pub fn map_names(&self, pred: &impl Fn(&N) -> Option<N>) -> Option<Self> { pub fn map_names(&self, pred: &impl Fn(&N) -> Option<N>) -> Option<Self> {
Some(Self { Some(Self {
value: self.value.map_names(pred)?, value: self.value.map_names(pred)?,
@@ -40,6 +41,7 @@ impl<N: NameLike> Expr<N> {
} }
/// Transform from one name system to another /// Transform from one name system to another
#[must_use]
pub fn transform_names<O: NameLike>(self, pred: &impl Fn(N) -> O) -> Expr<O> { pub fn transform_names<O: NameLike>(self, pred: &impl Fn(N) -> O) -> Expr<O> {
Expr { value: self.value.transform_names(pred), location: self.location } Expr { value: self.value.transform_names(pred), location: self.location }
} }
@@ -71,6 +73,7 @@ pub fn search_all_slcs<N: NameLike, T>(
impl Expr<VName> { impl Expr<VName> {
/// Add the specified prefix to every Name /// Add the specified prefix to every Name
#[must_use]
pub fn prefix( pub fn prefix(
&self, &self,
prefix: &[Tok<String>], prefix: &[Tok<String>],
@@ -143,6 +146,7 @@ pub enum Clause<N: NameLike> {
impl<N: NameLike> Clause<N> { impl<N: NameLike> Clause<N> {
/// Extract the expressions from an auto, lambda or S /// Extract the expressions from an auto, lambda or S
#[must_use]
pub fn body(&self) -> Option<Rc<Vec<Expr<N>>>> { pub fn body(&self) -> Option<Rc<Vec<Expr<N>>>> {
match self { match self {
Self::Lambda(_, body) | Self::S(_, body) => Some(body.clone()), Self::Lambda(_, body) | Self::S(_, body) => Some(body.clone()),
@@ -151,6 +155,7 @@ impl<N: NameLike> Clause<N> {
} }
/// Convert with identical meaning /// Convert with identical meaning
#[must_use]
pub fn into_expr(self) -> Expr<N> { pub fn into_expr(self) -> Expr<N> {
if let Self::S('(', body) = &self { if let Self::S('(', body) = &self {
if body.len() == 1 { if body.len() == 1 {
@@ -164,6 +169,7 @@ impl<N: NameLike> Clause<N> {
} }
/// Convert with identical meaning /// Convert with identical meaning
#[must_use]
pub fn from_exprs(exprs: &[Expr<N>]) -> Option<Self> { pub fn from_exprs(exprs: &[Expr<N>]) -> Option<Self> {
if exprs.is_empty() { if exprs.is_empty() {
None None
@@ -173,7 +179,9 @@ impl<N: NameLike> Clause<N> {
Some(Self::S('(', Rc::new(exprs.to_vec()))) Some(Self::S('(', Rc::new(exprs.to_vec())))
} }
} }
/// Convert with identical meaning /// Convert with identical meaning
#[must_use]
pub fn from_exprv(exprv: &Rc<Vec<Expr<N>>>) -> Option<Clause<N>> { pub fn from_exprv(exprv: &Rc<Vec<Expr<N>>>) -> Option<Clause<N>> {
if exprv.len() < 2 { if exprv.len() < 2 {
Self::from_exprs(exprv) Self::from_exprs(exprv)
@@ -185,6 +193,7 @@ impl<N: NameLike> Clause<N> {
/// Collect all names that appear in this expression. /// Collect all names that appear in this expression.
/// NOTICE: this isn't the total set of unbound names, it's mostly useful to /// NOTICE: this isn't the total set of unbound names, it's mostly useful to
/// make weak statements for optimization. /// make weak statements for optimization.
#[must_use]
pub fn collect_names(&self) -> HashSet<N> { pub fn collect_names(&self) -> HashSet<N> {
if let Self::Name(n) = self { if let Self::Name(n) = self {
return HashSet::from([n.clone()]); return HashSet::from([n.clone()]);
@@ -202,6 +211,7 @@ impl<N: NameLike> Clause<N> {
/// Process all names with the given mapper. /// Process all names with the given mapper.
/// Return a new object if anything was processed /// Return a new object if anything was processed
#[must_use]
pub fn map_names(&self, pred: &impl Fn(&N) -> Option<N>) -> Option<Self> { pub fn map_names(&self, pred: &impl Fn(&N) -> Option<N>) -> Option<Self> {
match self { match self {
Clause::P(_) | Clause::Placeh(_) => None, Clause::P(_) | Clause::Placeh(_) => None,
@@ -244,6 +254,7 @@ impl<N: NameLike> Clause<N> {
} }
/// Transform from one name representation to another /// Transform from one name representation to another
#[must_use]
pub fn transform_names<O: NameLike>( pub fn transform_names<O: NameLike>(
self, self,
pred: &impl Fn(N) -> O, pred: &impl Fn(N) -> O,
@@ -292,6 +303,7 @@ impl<N: NameLike> Clause<N> {
impl Clause<VName> { impl Clause<VName> {
/// Add the specified prefix to every Name /// Add the specified prefix to every Name
#[must_use]
pub fn prefix( pub fn prefix(
&self, &self,
prefix: &[Tok<String>], prefix: &[Tok<String>],
@@ -348,6 +360,7 @@ pub struct Rule<N: NameLike> {
impl Rule<VName> { impl Rule<VName> {
/// Namespace all tokens in the rule /// Namespace all tokens in the rule
#[must_use]
pub fn prefix( pub fn prefix(
&self, &self,
prefix: &[Tok<String>], prefix: &[Tok<String>],
@@ -364,6 +377,7 @@ impl Rule<VName> {
/// Return a list of all names that don't contain a namespace separator `::`. /// Return a list of all names that don't contain a namespace separator `::`.
/// These are exported when the rule is exported /// These are exported when the rule is exported
#[must_use]
pub fn collect_single_names(&self) -> VName { pub fn collect_single_names(&self) -> VName {
let mut names = Vec::new(); let mut names = Vec::new();
for e in self.pattern.iter() { for e in self.pattern.iter() {

View File

@@ -26,6 +26,7 @@ pub struct Error {
pub kind: ErrorKind, pub kind: ErrorKind,
} }
impl Error { impl Error {
#[must_use]
pub fn new(kind: ErrorKind, location: &Location) -> Self { pub fn new(kind: ErrorKind, location: &Location) -> Self {
Self { location: location.clone(), kind } Self { location: location.clone(), kind }
} }
@@ -64,6 +65,7 @@ struct Context<'a> {
} }
impl<'a> Context<'a> { impl<'a> Context<'a> {
#[must_use]
fn w_name<'b>(&'b self, name: Sym) -> Context<'b> fn w_name<'b>(&'b self, name: Sym) -> Context<'b>
where where
'a: 'b, 'a: 'b,
@@ -71,6 +73,7 @@ impl<'a> Context<'a> {
Context { names: self.names.push(name) } Context { names: self.names.push(name) }
} }
#[must_use]
fn new() -> Context<'static> { Context { names: Substack::Bottom } } fn new() -> Context<'static> { Context { names: Substack::Bottom } }
} }

View File

@@ -24,6 +24,7 @@ pub enum ConstTree {
} }
impl ConstTree { impl ConstTree {
/// Describe a [Primitive] /// Describe a [Primitive]
#[must_use]
pub fn primitive(primitive: Primitive) -> Self { pub fn primitive(primitive: Primitive) -> Self {
Self::Const(Expr { Self::Const(Expr {
location: Location::Unknown, location: Location::Unknown,
@@ -31,18 +32,22 @@ impl ConstTree {
}) })
} }
/// Describe an [ExternFn] /// Describe an [ExternFn]
#[must_use]
pub fn xfn(xfn: impl ExternFn + 'static) -> Self { pub fn xfn(xfn: impl ExternFn + 'static) -> Self {
Self::primitive(Primitive::ExternFn(Box::new(xfn))) Self::primitive(Primitive::ExternFn(Box::new(xfn)))
} }
/// Describe an [Atomic] /// Describe an [Atomic]
#[must_use]
pub fn atom(atom: impl Atomic + 'static) -> Self { pub fn atom(atom: impl Atomic + 'static) -> Self {
Self::primitive(Primitive::Atom(Atom(Box::new(atom)))) Self::primitive(Primitive::Atom(Atom(Box::new(atom))))
} }
/// Describe a module /// Describe a module
#[must_use]
pub fn tree(arr: impl IntoIterator<Item = (Tok<String>, Self)>) -> Self { pub fn tree(arr: impl IntoIterator<Item = (Tok<String>, Self)>) -> Self {
Self::Tree(arr.into_iter().collect()) Self::Tree(arr.into_iter().collect())
} }
/// Namespace the tree with the list of names /// Namespace the tree with the list of names
#[must_use]
pub fn namespace( pub fn namespace(
pref: impl IntoIterator<Item = Tok<String>>, pref: impl IntoIterator<Item = Tok<String>>,
data: Self, data: Self,
@@ -59,6 +64,7 @@ impl ConstTree {
/// # Panics /// # Panics
/// ///
/// If this is a leaf node aka. constant and not a namespace /// If this is a leaf node aka. constant and not a namespace
#[must_use]
pub fn unwrap_tree(self) -> HashMap<Tok<String>, Self> { pub fn unwrap_tree(self) -> HashMap<Tok<String>, Self> {
match self { match self {
Self::Tree(map) => map, Self::Tree(map) => map,
@@ -87,6 +93,7 @@ impl Add for ConstTree {
} }
} }
#[must_use]
fn from_const_tree_rec( fn from_const_tree_rec(
path: Substack<Tok<String>>, path: Substack<Tok<String>>,
consts: HashMap<Tok<String>, ConstTree>, consts: HashMap<Tok<String>, ConstTree>,
@@ -119,6 +126,7 @@ fn from_const_tree_rec(
/// Convert a map of [ConstTree] into a [ProjectTree] that can be used with the /// Convert a map of [ConstTree] into a [ProjectTree] that can be used with the
/// layered parsing system /// layered parsing system
#[must_use]
pub fn from_const_tree( pub fn from_const_tree(
consts: HashMap<Tok<String>, ConstTree>, consts: HashMap<Tok<String>, ConstTree>,
file: &[Tok<String>], file: &[Tok<String>],

View File

@@ -64,10 +64,12 @@ pub struct ExprInst(pub Rc<RefCell<Expr>>);
impl ExprInst { impl ExprInst {
/// Wrap an [Expr] in a shared container so that normalizatoin steps are /// Wrap an [Expr] in a shared container so that normalizatoin steps are
/// applied to all references /// applied to all references
#[must_use]
pub fn new(expr: Expr) -> Self { Self(Rc::new(RefCell::new(expr))) } pub fn new(expr: Expr) -> Self { Self(Rc::new(RefCell::new(expr))) }
/// Take the [Expr] out of this container if it's the last reference to it, or /// Take the [Expr] out of this container if it's the last reference to it, or
/// clone it out. /// clone it out.
#[must_use]
pub fn expr_val(self) -> Expr { pub fn expr_val(self) -> Expr {
Rc::try_unwrap(self.0) Rc::try_unwrap(self.0)
.map(|c| c.into_inner()) .map(|c| c.into_inner())
@@ -79,6 +81,7 @@ impl ExprInst {
/// # Panics /// # Panics
/// ///
/// if the expression is already borrowed in read-write mode /// if the expression is already borrowed in read-write mode
#[must_use]
pub fn expr(&self) -> impl Deref<Target = Expr> + '_ { pub fn expr(&self) -> impl Deref<Target = Expr> + '_ {
self.0.as_ref().borrow() self.0.as_ref().borrow()
} }
@@ -88,6 +91,7 @@ impl ExprInst {
/// # Panics /// # Panics
/// ///
/// if the expression is already borrowed /// if the expression is already borrowed
#[must_use]
pub fn expr_mut(&self) -> impl DerefMut<Target = Expr> + '_ { pub fn expr_mut(&self) -> impl DerefMut<Target = Expr> + '_ {
self.0.as_ref().borrow_mut() self.0.as_ref().borrow_mut()
} }
@@ -124,6 +128,7 @@ impl ExprInst {
/// Call a predicate on the expression, returning whatever the /// Call a predicate on the expression, returning whatever the
/// predicate returns. This is a convenience function for reaching /// predicate returns. This is a convenience function for reaching
/// through the RefCell. /// through the RefCell.
#[must_use]
pub fn inspect<T>(&self, predicate: impl FnOnce(&Clause) -> T) -> T { pub fn inspect<T>(&self, predicate: impl FnOnce(&Clause) -> T) -> T {
predicate(&self.expr().clause) predicate(&self.expr().clause)
} }
@@ -178,10 +183,12 @@ impl ExprInst {
} }
/// Get the code location data associated with this expresssion directly /// Get the code location data associated with this expresssion directly
#[must_use]
pub fn location(&self) -> Location { self.expr().location.clone() } pub fn location(&self) -> Location { self.expr().location.clone() }
/// If this expression is an [Atomic], request an object of the given type. /// If this expression is an [Atomic], request an object of the given type.
/// If it's not an atomic, fail the request automatically. /// If it's not an atomic, fail the request automatically.
#[must_use = "your request might not have succeeded"]
pub fn request<T: 'static>(&self) -> Option<T> { pub fn request<T: 'static>(&self) -> Option<T> {
match &self.expr().clause { match &self.expr().clause {
Clause::P(Primitive::Atom(a)) => request(&*a.0), Clause::P(Primitive::Atom(a)) => request(&*a.0),

View File

@@ -27,6 +27,7 @@ pub enum Location {
impl Location { impl Location {
/// Range, if known. If the range is known, the file is always known /// Range, if known. If the range is known, the file is always known
#[must_use]
pub fn range(&self) -> Option<Range<usize>> { pub fn range(&self) -> Option<Range<usize>> {
if let Self::Range { range, .. } = self { if let Self::Range { range, .. } = self {
Some(range.clone()) Some(range.clone())
@@ -36,6 +37,7 @@ impl Location {
} }
/// File, if known /// File, if known
#[must_use]
pub fn file(&self) -> Option<Rc<VName>> { pub fn file(&self) -> Option<Rc<VName>> {
if let Self::File(file) | Self::Range { file, .. } = self { if let Self::File(file) | Self::Range { file, .. } = self {
Some(file.clone()) Some(file.clone())
@@ -45,6 +47,7 @@ impl Location {
} }
/// Associated source code, if known /// Associated source code, if known
#[must_use]
pub fn source(&self) -> Option<Rc<String>> { pub fn source(&self) -> Option<Rc<String>> {
if let Self::Range { source, .. } = self { if let Self::Range { source, .. } = self {
Some(source.clone()) Some(source.clone())
@@ -55,6 +58,7 @@ impl Location {
/// If the two locations are ranges in the same file, connect them. /// If the two locations are ranges in the same file, connect them.
/// Otherwise choose the more accurate, preferring lhs if equal. /// Otherwise choose the more accurate, preferring lhs if equal.
#[must_use]
pub fn to(self, other: Self) -> Self { pub fn to(self, other: Self) -> Self {
match self { match self {
Location::Unknown => other, Location::Unknown => other,
@@ -75,6 +79,7 @@ impl Location {
/// Choose one of the two locations, preferring better accuracy, or lhs if /// Choose one of the two locations, preferring better accuracy, or lhs if
/// equal /// equal
#[must_use]
pub fn or(self, alt: Self) -> Self { pub fn or(self, alt: Self) -> Self {
match (&self, &alt) { match (&self, &alt) {
(Self::Unknown, _) => alt, (Self::Unknown, _) => alt,
@@ -110,6 +115,7 @@ impl Debug for Location {
} }
} }
#[must_use]
fn pos2lc(s: &str, i: usize) -> (usize, usize) { fn pos2lc(s: &str, i: usize) -> (usize, usize) {
s.chars().take(i).fold((1, 1), |(line, col), char| { s.chars().take(i).fold((1, 1), |(line, col), char| {
if char == '\n' { (line + 1, 1) } else { (line, col + 1) } if char == '\n' { (line + 1, 1) } else { (line, col + 1) }

View File

@@ -21,6 +21,7 @@ pub type Sym = Tok<VName>;
/// handled together in datastructures /// handled together in datastructures
pub trait NameLike: 'static + Clone + Eq + Hash + Debug { pub trait NameLike: 'static + Clone + Eq + Hash + Debug {
/// Fully resolve the name for printing /// Fully resolve the name for printing
#[must_use]
fn to_strv(&self) -> Vec<String>; fn to_strv(&self) -> Vec<String>;
} }

View File

@@ -5,6 +5,7 @@ use super::path_set::PathSet;
use super::{interpreted, postmacro}; use super::{interpreted, postmacro};
use crate::utils::Side; use crate::utils::Side;
#[must_use]
fn collect_paths_expr_rec( fn collect_paths_expr_rec(
expr: &postmacro::Expr, expr: &postmacro::Expr,
depth: usize, depth: usize,

View File

@@ -137,6 +137,7 @@ fn collect_rules_rec<N: NameLike>(
/// Collect the complete list of rules to be used by the rule repository from /// Collect the complete list of rules to be used by the rule repository from
/// the [ProjectTree] /// the [ProjectTree]
#[must_use]
pub fn collect_rules<N: NameLike>(project: &ProjectTree<N>) -> Vec<Rule<N>> { pub fn collect_rules<N: NameLike>(project: &ProjectTree<N>) -> Vec<Rule<N>> {
let mut rules = Vec::new(); let mut rules = Vec::new();
collect_rules_rec(&mut rules, &project.0); collect_rules_rec(&mut rules, &project.0);
@@ -164,6 +165,7 @@ fn collect_consts_rec<N: NameLike>(
} }
/// Extract the symbol table from a [ProjectTree] /// Extract the symbol table from a [ProjectTree]
#[must_use]
pub fn collect_consts<N: NameLike>( pub fn collect_consts<N: NameLike>(
project: &ProjectTree<N>, project: &ProjectTree<N>,
i: &Interner, i: &Interner,
@@ -173,6 +175,7 @@ pub fn collect_consts<N: NameLike>(
consts consts
} }
#[must_use]
fn vname_to_sym_tree_rec( fn vname_to_sym_tree_rec(
tree: ProjectMod<VName>, tree: ProjectMod<VName>,
i: &Interner, i: &Interner,
@@ -218,6 +221,7 @@ fn vname_to_sym_tree_rec(
/// Convert a flexible vname-based tree to a more rigid but faster symbol-based /// Convert a flexible vname-based tree to a more rigid but faster symbol-based
/// tree. The pipeline works with vnames, but the macro executor works with /// tree. The pipeline works with vnames, but the macro executor works with
/// symbols. /// symbols.
#[must_use]
pub fn vname_to_sym_tree( pub fn vname_to_sym_tree(
tree: ProjectTree<VName>, tree: ProjectTree<VName>,
i: &Interner, i: &Interner,

View File

@@ -34,6 +34,7 @@ impl Import {
/// ///
/// Returns the path if this is a glob import, or the path plus the /// Returns the path if this is a glob import, or the path plus the
/// name if this is a specific import /// name if this is a specific import
#[must_use]
pub fn nonglob_path(&self) -> VName { pub fn nonglob_path(&self) -> VName {
let mut path_vec = self.path.clone(); let mut path_vec = self.path.clone();
if let Some(n) = &self.name { if let Some(n) = &self.name {
@@ -241,6 +242,7 @@ pub fn absolute_path(
}) })
} }
#[must_use = "this could be None which means that there are too many supers"]
fn absolute_path_rec( fn absolute_path_rec(
abs_location: &[Tok<String>], abs_location: &[Tok<String>],
rel_path: &[Tok<String>], rel_path: &[Tok<String>],

View File

@@ -3,7 +3,8 @@ use std::hash::Hash;
use std::ops::Deref; use std::ops::Deref;
use std::rc::Rc; use std::rc::Rc;
use crate::Tok; use crate::interpreted::{Clause, ExprInst};
use crate::{Literal, Primitive, Tok};
/// An Orchid string which may or may not be interned /// An Orchid string which may or may not be interned
#[derive(Clone, Eq)] #[derive(Clone, Eq)]
@@ -25,6 +26,7 @@ impl Debug for OrcString {
impl OrcString { impl OrcString {
/// Clone out the plain Rust [String] /// Clone out the plain Rust [String]
#[must_use]
pub fn get_string(self) -> String { pub fn get_string(self) -> String {
match self { match self {
Self::Interned(s) => s.as_str().to_owned(), Self::Interned(s) => s.as_str().to_owned(),
@@ -32,6 +34,14 @@ impl OrcString {
Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone()), Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone()),
} }
} }
/// Wrap in a [Clause] for returning from extern functions
pub fn cls(self) -> Clause {
Clause::P(Primitive::Literal(Literal::Str(self)))
}
/// Wrap in an [ExprInst] for embedding in runtime-generated code
pub fn exi(self) -> ExprInst { self.cls().wrap() }
} }
impl Deref for OrcString { impl Deref for OrcString {

View File

@@ -33,6 +33,7 @@ pub struct ModEntry<TItem: Clone, TExt: Clone> {
} }
impl<TItem: Clone, TExt: Clone> ModEntry<TItem, TExt> { impl<TItem: Clone, TExt: Clone> ModEntry<TItem, TExt> {
/// Returns the item in this entry if it contains one. /// Returns the item in this entry if it contains one.
#[must_use]
pub fn item(&self) -> Option<&TItem> { pub fn item(&self) -> Option<&TItem> {
match &self.member { match &self.member {
ModMember::Item(it) => Some(it), ModMember::Item(it) => Some(it),
@@ -56,6 +57,7 @@ pub type ModPath<'a> = Substack<'a, Tok<String>>;
impl<TItem: Clone, TExt: Clone> Module<TItem, TExt> { impl<TItem: Clone, TExt: Clone> Module<TItem, TExt> {
/// If the argument is false, returns all child names. /// If the argument is false, returns all child names.
/// If the argument is true, returns all public child names. /// If the argument is true, returns all public child names.
#[must_use]
pub fn keys(&self, public: bool) -> BoxedIter<Tok<String>> { pub fn keys(&self, public: bool) -> BoxedIter<Tok<String>> {
match public { match public {
false => Box::new(self.entries.keys().cloned()), false => Box::new(self.entries.keys().cloned()),
@@ -96,7 +98,6 @@ impl<TItem: Clone, TExt: Clone> Module<TItem, TExt> {
/// # Panics /// # Panics
/// ///
/// if path is empty, since the reference cannot be forwarded that way /// if path is empty, since the reference cannot be forwarded that way
#[allow(clippy::needless_arbitrary_self_type)] // duplicate
pub fn walk1_ref<'a: 'b, 'b>( pub fn walk1_ref<'a: 'b, 'b>(
&'a self, &'a self,
prefix: &'b [Tok<String>], prefix: &'b [Tok<String>],
@@ -223,9 +224,11 @@ pub struct WalkError<'a> {
} }
impl<'a> WalkError<'a> { impl<'a> WalkError<'a> {
/// Total length of the path represented by this error /// Total length of the path represented by this error
#[must_use]
pub fn depth(&self) -> usize { self.prefix.len() + self.pos + 1 } pub fn depth(&self) -> usize { self.prefix.len() + self.pos + 1 }
/// Attach a location to the error and convert into trait object for reporting /// Attach a location to the error and convert into trait object for reporting
#[must_use]
pub fn at(self, location: &Location) -> Rc<dyn ProjectError> { pub fn at(self, location: &Location) -> Rc<dyn ProjectError> {
// panic!("hello"); // panic!("hello");
WalkErrorWithLocation { WalkErrorWithLocation {

View File

@@ -1,201 +0,0 @@
use std::fmt::{Debug, Write};
use std::rc::Rc;
use mappable_rc::Mrc;
use super::get_name::get_name;
use super::primitive::Primitive;
use super::{ast, ast_to_postmacro, get_name, Literal};
use crate::executor::apply_lambda;
use crate::foreign::{Atom, ExternFn};
use crate::utils::{one_mrc_slice, string_from_charset, to_mrc_slice};
/// Indicates whether either side needs to be wrapped. Syntax whose end is
/// ambiguous on that side must use parentheses, or forward the flag
#[derive(PartialEq, Eq, Clone, Copy)]
struct Wrap(bool, bool);
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct Expr(pub Clause, pub Vec<Clause>);
impl Expr {
fn deep_fmt(
&self,
f: &mut std::fmt::Formatter<'_>,
depth: usize,
tr: Wrap,
) -> std::fmt::Result {
let Expr(val, typ) = self;
if typ.len() > 0 {
val.deep_fmt(f, depth, Wrap(true, true))?;
for typterm in typ {
f.write_char(':')?;
typterm.deep_fmt(f, depth, Wrap(true, true))?;
}
} else {
val.deep_fmt(f, depth, tr)?;
}
Ok(())
}
}
impl Debug for Expr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.deep_fmt(f, 0, Wrap(false, false))
}
}
#[derive(PartialEq, Eq, Hash)]
pub enum Clause {
P(Primitive),
Apply(Rc<Expr>, Rc<Expr>),
Lambda(Rc<[Clause]>, Rc<Expr>),
Auto(Rc<[Clause]>, Rc<Expr>),
LambdaArg(usize),
AutoArg(usize),
}
const ARGNAME_CHARSET: &str = "abcdefghijklmnopqrstuvwxyz";
fn parametric_fmt(
f: &mut std::fmt::Formatter<'_>,
depth: usize,
prefix: &str,
argtyp: &[Clause],
body: &Expr,
wrap_right: bool,
) -> std::fmt::Result {
if wrap_right {
f.write_char('(')?;
}
f.write_str(prefix)?;
f.write_str(&string_from_charset(depth as u64, ARGNAME_CHARSET))?;
for typ in argtyp.iter() {
f.write_str(":")?;
typ.deep_fmt(f, depth, Wrap(false, false))?;
}
f.write_str(".")?;
body.deep_fmt(f, depth + 1, Wrap(false, false))?;
if wrap_right {
f.write_char(')')?;
}
Ok(())
}
impl Clause {
fn deep_fmt(
&self,
f: &mut std::fmt::Formatter<'_>,
depth: usize,
Wrap(wl, wr): Wrap,
) -> std::fmt::Result {
match self {
Self::P(p) => write!(f, "{p:?}"),
Self::Lambda(argtyp, body) =>
parametric_fmt(f, depth, "\\", argtyp, body, wr),
Self::Auto(argtyp, body) =>
parametric_fmt(f, depth, "@", argtyp, body, wr),
Self::LambdaArg(skip) | Self::AutoArg(skip) => {
let lambda_depth = (depth - skip - 1).try_into().unwrap();
f.write_str(&string_from_charset(lambda_depth, ARGNAME_CHARSET))
},
Self::Apply(func, x) => {
if wl {
f.write_char('(')?;
}
func.deep_fmt(f, depth, Wrap(false, true))?;
f.write_char(' ')?;
x.deep_fmt(f, depth, Wrap(true, wr && !wl))?;
if wl {
f.write_char(')')?;
}
Ok(())
},
}
}
pub fn wrap(self) -> Box<Expr> {
Box::new(Expr(self, vec![]))
}
pub fn wrap_t(self, t: Clause) -> Box<Expr> {
Box::new(Expr(self, vec![t]))
}
}
impl Clone for Clause {
fn clone(&self) -> Self {
match self {
Clause::Auto(t, b) => {
let new_id = get_name();
let new_body =
apply_lambda(*uid, Clause::AutoArg(new_id).wrap(), b.clone());
Clause::Auto(new_id, t.clone(), new_body)
},
Clause::Lambda(uid, t, b) => {
let new_id = get_name();
let new_body =
apply_lambda(*uid, Clause::LambdaArg(new_id).wrap(), b.clone());
Clause::Lambda(new_id, t.clone(), new_body)
},
Clause::Literal(l) => Clause::Literal(l.clone()),
Clause::ExternFn(nc) => Clause::ExternFn(nc.clone()),
Clause::Atom(a) => Clause::Atom(a.clone()),
Clause::Apply(f, x) => Clause::Apply(Box::clone(&f), x.clone()),
Clause::LambdaArg(id) => Clause::LambdaArg(*id),
Clause::AutoArg(id) => Clause::AutoArg(*id),
}
}
}
impl Debug for Clause {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.deep_fmt(f, Wrap(false, false))
}
}
impl TryFrom<&ast::Expr> for Expr {
type Error = ast_to_postmacro::Error;
fn try_from(value: &ast::Expr) -> Result<Self, Self::Error> {
ast_to_postmacro::expr(value)
}
}
impl TryFrom<&ast::Clause> for Clause {
type Error = ast_to_postmacro::Error;
fn try_from(value: &ast::Clause) -> Result<Self, Self::Error> {
ast_to_postmacro::clause(value)
}
}
pub fn is_used_clause(id: u64, is_auto: bool, clause: &Clause) -> bool {
match clause {
Clause::Atom(_) | Clause::ExternFn(_) | Clause::Literal(_) => false,
Clause::AutoArg(x) => is_auto && *x == id,
Clause::LambdaArg(x) => !is_auto && *x == id,
Clause::Apply(f, x) =>
is_used_expr(id, is_auto, &f) || is_used_expr(id, is_auto, &x),
Clause::Auto(n, t, b) => {
assert!(*n != id, "Shadowing should have been eliminated");
if is_auto && t.iter().any(|c| is_used_clause(id, is_auto, c)) {
return true;
};
is_used_expr(id, is_auto, b)
},
Clause::Lambda(n, t, b) => {
assert!(*n != id, "Shadowing should have been eliminated");
if is_auto && t.iter().any(|c| is_used_clause(id, is_auto, c)) {
return true;
};
is_used_expr(id, is_auto, b)
},
}
}
pub fn is_used_expr(
id: u64,
is_auto: bool,
Expr(val, typ): &Expr,
) -> bool {
if is_auto && typ.iter().any(|c| is_used_clause(id, is_auto, c)) {
return true;
};
is_used_clause(id, is_auto, val)
}

View File

@@ -10,7 +10,9 @@ pub type RuleExpr = Expr<Sym>;
/// injected to allow experimentation in the matcher implementation. /// injected to allow experimentation in the matcher implementation.
pub trait Matcher { pub trait Matcher {
/// Build matcher for a pattern /// Build matcher for a pattern
#[must_use]
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self; fn new(pattern: Rc<Vec<RuleExpr>>) -> Self;
/// Apply matcher to a token sequence /// Apply matcher to a token sequence
#[must_use]
fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>>; fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>>;
} }

View File

@@ -4,6 +4,7 @@ use super::vec_match::vec_match;
use crate::rule::matcher::RuleExpr; use crate::rule::matcher::RuleExpr;
use crate::rule::state::State; use crate::rule::state::State;
#[must_use]
pub fn any_match<'a>( pub fn any_match<'a>(
matcher: &AnyMatcher, matcher: &AnyMatcher,
seq: &'a [RuleExpr], seq: &'a [RuleExpr],

View File

@@ -12,6 +12,7 @@ pub type MaxVecSplit<'a> =
/// Derive the details of the central vectorial and the two sides from a /// Derive the details of the central vectorial and the two sides from a
/// slice of Expr's /// slice of Expr's
#[must_use]
fn split_at_max_vec(pattern: &[RuleExpr]) -> Option<MaxVecSplit> { fn split_at_max_vec(pattern: &[RuleExpr]) -> Option<MaxVecSplit> {
let rngidx = pattern.iter().position_max_by_key(|expr| { let rngidx = pattern.iter().position_max_by_key(|expr| {
vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1) vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1)
@@ -23,10 +24,12 @@ fn split_at_max_vec(pattern: &[RuleExpr]) -> Option<MaxVecSplit> {
vec_attrs(placeh).map(|attrs| (left, attrs, right)) vec_attrs(placeh).map(|attrs| (left, attrs, right))
} }
#[must_use]
fn scal_cnt<'a>(iter: impl Iterator<Item = &'a RuleExpr>) -> usize { fn scal_cnt<'a>(iter: impl Iterator<Item = &'a RuleExpr>) -> usize {
iter.take_while(|expr| vec_attrs(expr).is_none()).count() iter.take_while(|expr| vec_attrs(expr).is_none()).count()
} }
#[must_use]
pub fn mk_any(pattern: &[RuleExpr]) -> AnyMatcher { pub fn mk_any(pattern: &[RuleExpr]) -> AnyMatcher {
let left_split = scal_cnt(pattern.iter()); let left_split = scal_cnt(pattern.iter());
if pattern.len() <= left_split { if pattern.len() <= left_split {
@@ -43,11 +46,13 @@ pub fn mk_any(pattern: &[RuleExpr]) -> AnyMatcher {
} }
/// Pattern MUST NOT contain vectorial placeholders /// Pattern MUST NOT contain vectorial placeholders
#[must_use]
fn mk_scalv(pattern: &[RuleExpr]) -> Vec<ScalMatcher> { fn mk_scalv(pattern: &[RuleExpr]) -> Vec<ScalMatcher> {
pattern.iter().map(mk_scalar).collect() pattern.iter().map(mk_scalar).collect()
} }
/// Pattern MUST start and end with a vectorial placeholder /// Pattern MUST start and end with a vectorial placeholder
#[must_use]
fn mk_vec(pattern: &[RuleExpr]) -> VecMatcher { fn mk_vec(pattern: &[RuleExpr]) -> VecMatcher {
debug_assert!(!pattern.is_empty(), "pattern cannot be empty"); debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
debug_assert!( debug_assert!(
@@ -99,6 +104,7 @@ fn mk_vec(pattern: &[RuleExpr]) -> VecMatcher {
} }
/// Pattern MUST NOT be a vectorial placeholder /// Pattern MUST NOT be a vectorial placeholder
#[must_use]
fn mk_scalar(pattern: &RuleExpr) -> ScalMatcher { fn mk_scalar(pattern: &RuleExpr) -> ScalMatcher {
match &pattern.value { match &pattern.value {
Clause::P(p) => ScalMatcher::P(p.clone()), Clause::P(p) => ScalMatcher::P(p.clone()),

View File

@@ -4,6 +4,7 @@ use crate::ast::Clause;
use crate::rule::matcher::RuleExpr; use crate::rule::matcher::RuleExpr;
use crate::rule::state::{State, StateEntry}; use crate::rule::state::{State, StateEntry};
#[must_use]
pub fn scal_match<'a>( pub fn scal_match<'a>(
matcher: &ScalMatcher, matcher: &ScalMatcher,
expr: &'a RuleExpr, expr: &'a RuleExpr,
@@ -24,6 +25,7 @@ pub fn scal_match<'a>(
} }
} }
#[must_use]
pub fn scalv_match<'a>( pub fn scalv_match<'a>(
matchers: &[ScalMatcher], matchers: &[ScalMatcher],
seq: &'a [RuleExpr], seq: &'a [RuleExpr],

View File

@@ -8,6 +8,7 @@ use crate::rule::matcher::RuleExpr;
use crate::rule::state::{State, StateEntry}; use crate::rule::state::{State, StateEntry};
use crate::utils::unwrap_or; use crate::utils::unwrap_or;
#[must_use]
pub fn vec_match<'a>( pub fn vec_match<'a>(
matcher: &VecMatcher, matcher: &VecMatcher,
seq: &'a [RuleExpr], seq: &'a [RuleExpr],

View File

@@ -11,6 +11,7 @@ use crate::Sym;
/// Ensure that the rule's source begins and ends with a vectorial without /// Ensure that the rule's source begins and ends with a vectorial without
/// changing its meaning /// changing its meaning
#[must_use]
fn pad(mut rule: Rule<Sym>, i: &Interner) -> Rule<Sym> { fn pad(mut rule: Rule<Sym>, i: &Interner) -> Rule<Sym> {
let class: PHClass = PHClass::Vec { nonzero: false, prio: 0 }; let class: PHClass = PHClass::Vec { nonzero: false, prio: 0 };
let empty: &[Expr<Sym>] = &[]; let empty: &[Expr<Sym>] = &[];

View File

@@ -67,6 +67,7 @@ impl<M: Matcher> Repository<M> {
} }
/// Attempt to run each rule in priority order once /// Attempt to run each rule in priority order once
#[must_use]
pub fn step(&self, code: &RuleExpr) -> Option<RuleExpr> { pub fn step(&self, code: &RuleExpr) -> Option<RuleExpr> {
let glossary = code.value.collect_names(); let glossary = code.value.collect_names();
for (rule, deps, _) in self.cache.iter() { for (rule, deps, _) in self.cache.iter() {
@@ -87,7 +88,7 @@ impl<M: Matcher> Repository<M> {
/// Keep running the matching rule with the highest priority until no /// Keep running the matching rule with the highest priority until no
/// rules match. WARNING: this function might not terminate /// rules match. WARNING: this function might not terminate
#[allow(unused)] #[must_use]
pub fn pass(&self, code: &RuleExpr) -> Option<RuleExpr> { pub fn pass(&self, code: &RuleExpr) -> Option<RuleExpr> {
if let Some(mut processed) = self.step(code) { if let Some(mut processed) = self.step(code) {
while let Some(out) = self.step(&processed) { while let Some(out) = self.step(&processed) {
@@ -101,7 +102,7 @@ impl<M: Matcher> Repository<M> {
/// Attempt to run each rule in priority order `limit` times. Returns /// Attempt to run each rule in priority order `limit` times. Returns
/// the final tree and the number of iterations left to the limit. /// the final tree and the number of iterations left to the limit.
#[allow(unused)] #[must_use]
pub fn long_step( pub fn long_step(
&self, &self,
code: &RuleExpr, code: &RuleExpr,
@@ -138,6 +139,7 @@ impl<M: Debug + Matcher> Debug for Repository<M> {
} }
} }
#[must_use]
fn fmt_hex(num: f64) -> String { fn fmt_hex(num: f64) -> String {
let exponent = (num.log2() / 4_f64).floor(); let exponent = (num.log2() / 4_f64).floor();
let mantissa = num / 16_f64.powf(exponent); let mantissa = num / 16_f64.powf(exponent);

View File

@@ -23,6 +23,7 @@ pub enum RuleError {
} }
impl RuleError { impl RuleError {
/// Convert into a unified error trait object shared by all Orchid errors /// Convert into a unified error trait object shared by all Orchid errors
#[must_use]
pub fn to_project_error(self, rule: &Rule<Sym>) -> Rc<dyn ProjectError> { pub fn to_project_error(self, rule: &Rule<Sym>) -> Rc<dyn ProjectError> {
match self { match self {
RuleError::Missing(name) => Missing::new(rule, name).rc(), RuleError::Missing(name) => Missing::new(rule, name).rc(),
@@ -58,6 +59,7 @@ pub struct Missing {
name: Tok<String>, name: Tok<String>,
} }
impl Missing { impl Missing {
#[must_use]
pub fn new(rule: &ast::Rule<Sym>, name: Tok<String>) -> Self { pub fn new(rule: &ast::Rule<Sym>, name: Tok<String>) -> Self {
let mut locations = HashSet::new(); let mut locations = HashSet::new();
for expr in rule.template.iter() { for expr in rule.template.iter() {
@@ -99,6 +101,7 @@ pub struct Multiple {
name: Tok<String>, name: Tok<String>,
} }
impl Multiple { impl Multiple {
#[must_use]
pub fn new(rule: &ast::Rule<Sym>, name: Tok<String>) -> Self { pub fn new(rule: &ast::Rule<Sym>, name: Tok<String>) -> Self {
let mut locations = HashSet::new(); let mut locations = HashSet::new();
for expr in rule.template.iter() { for expr in rule.template.iter() {
@@ -137,6 +140,7 @@ pub struct ArityMismatch {
name: Tok<String>, name: Tok<String>,
} }
impl ArityMismatch { impl ArityMismatch {
#[must_use]
pub fn new(rule: &ast::Rule<Sym>, name: Tok<String>) -> Self { pub fn new(rule: &ast::Rule<Sym>, name: Tok<String>) -> Self {
let mut locations = HashSet::new(); let mut locations = HashSet::new();
for expr in rule.template.iter() { for expr in rule.template.iter() {
@@ -188,6 +192,7 @@ pub struct VecNeighbors {
n2: Tok<String>, n2: Tok<String>,
} }
impl VecNeighbors { impl VecNeighbors {
#[must_use]
pub fn new(rule: &ast::Rule<Sym>, n1: Tok<String>, n2: Tok<String>) -> Self { pub fn new(rule: &ast::Rule<Sym>, n1: Tok<String>, n2: Tok<String>) -> Self {
let mut locations = HashSet::new(); let mut locations = HashSet::new();
search_all_slcs(&rule.template[..], &mut |ev| { search_all_slcs(&rule.template[..], &mut |ev| {

View File

@@ -14,6 +14,7 @@ pub enum StateEntry<'a> {
} }
pub type State<'a> = HashMap<Tok<String>, StateEntry<'a>>; pub type State<'a> = HashMap<Tok<String>, StateEntry<'a>>;
#[must_use]
pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec<RuleExpr> { pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec<RuleExpr> {
template template
.iter() .iter()
@@ -22,6 +23,7 @@ pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec<RuleExpr> {
.collect() .collect()
} }
#[must_use]
pub fn apply_expr(template: &RuleExpr, state: &State) -> Vec<RuleExpr> { pub fn apply_expr(template: &RuleExpr, state: &State) -> Vec<RuleExpr> {
let Expr { location, value } = template; let Expr { location, value } = template;
match value { match value {

View File

@@ -8,6 +8,7 @@ use crate::Sym;
/// Traverse the tree, calling pred on every sibling list until it returns /// Traverse the tree, calling pred on every sibling list until it returns
/// some vec then replace the sibling list with that vec and return true /// some vec then replace the sibling list with that vec and return true
/// return false if pred never returned some /// return false if pred never returned some
#[must_use]
pub fn exprv<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>( pub fn exprv<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
input: Rc<Vec<RuleExpr>>, input: Rc<Vec<RuleExpr>>,
pred: &mut F, pred: &mut F,
@@ -19,6 +20,7 @@ pub fn exprv<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
.map(|i| Rc::new(i.collect())) .map(|i| Rc::new(i.collect()))
} }
#[must_use]
pub fn expr<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>( pub fn expr<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
input: &RuleExpr, input: &RuleExpr,
pred: &mut F, pred: &mut F,
@@ -27,6 +29,7 @@ pub fn expr<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
.map(|value| Expr { value, location: input.location.clone() }) .map(|value| Expr { value, location: input.location.clone() })
} }
#[must_use]
pub fn clause<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>( pub fn clause<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
c: &Clause<Sym>, c: &Clause<Sym>,
pred: &mut F, pred: &mut F,

View File

@@ -4,6 +4,7 @@ use crate::interner::Tok;
/// Returns the name, priority and nonzero of the expression if it is /// Returns the name, priority and nonzero of the expression if it is
/// a vectorial placeholder /// a vectorial placeholder
#[must_use]
pub fn vec_attrs(expr: &RuleExpr) -> Option<(Tok<String>, u64, bool)> { pub fn vec_attrs(expr: &RuleExpr) -> Option<(Tok<String>, u64, bool)> {
match expr.value.clone() { match expr.value.clone() {
Clause::Placeh(Placeholder { Clause::Placeh(Placeholder {

View File

@@ -0,0 +1,5 @@
export const block_on := \action.\cont. (
action cont
(\e.panic "unwrapped asynch call")
\c.yield
)

View File

@@ -8,12 +8,14 @@ use std::time::Duration;
use hashbrown::HashMap; use hashbrown::HashMap;
use ordered_float::NotNan; use ordered_float::NotNan;
use rust_embed::RustEmbed;
use crate::facade::{IntoSystem, System}; use crate::facade::{IntoSystem, System};
use crate::foreign::cps_box::{init_cps, CPSBox}; use crate::foreign::cps_box::{init_cps, CPSBox};
use crate::foreign::{Atomic, ExternError, InertAtomic}; use crate::foreign::{Atomic, ExternError, InertAtomic};
use crate::interpreted::ExprInst; use crate::interpreted::ExprInst;
use crate::interpreter::HandlerTable; use crate::interpreter::HandlerTable;
use crate::pipeline::file_loader::embed_to_map;
use crate::systems::codegen::call; use crate::systems::codegen::call;
use crate::systems::stl::Boolean; use crate::systems::stl::Boolean;
use crate::utils::poller::{PollEvent, Poller}; use crate::utils::poller::{PollEvent, Poller};
@@ -68,6 +70,12 @@ impl MessagePort {
} }
} }
#[derive(RustEmbed)]
#[folder = "src/systems/asynch"]
#[prefix = "system/"]
#[include = "*.orc"]
struct AsynchEmbed;
type AnyHandler<'a> = Box<dyn FnMut(Box<dyn Any>) -> Vec<ExprInst> + 'a>; type AnyHandler<'a> = Box<dyn FnMut(Box<dyn Any>) -> Vec<ExprInst> + 'a>;
/// Datastructures the asynch system will eventually be constructed from. /// Datastructures the asynch system will eventually be constructed from.
@@ -80,6 +88,7 @@ pub struct AsynchSystem<'a> {
impl<'a> AsynchSystem<'a> { impl<'a> AsynchSystem<'a> {
/// Create a new async event loop that allows registering handlers and taking /// Create a new async event loop that allows registering handlers and taking
/// references to the port before it's converted into a [System] /// references to the port before it's converted into a [System]
#[must_use]
pub fn new() -> Self { pub fn new() -> Self {
let (sender, poller) = Poller::new(); let (sender, poller) = Poller::new();
Self { poller, sender, handlers: HashMap::new() } Self { poller, sender, handlers: HashMap::new() }
@@ -108,6 +117,7 @@ impl<'a> AsynchSystem<'a> {
/// Obtain a message port for sending messages to the main thread. If an /// Obtain a message port for sending messages to the main thread. If an
/// object is passed to the MessagePort that does not have a handler, the /// object is passed to the MessagePort that does not have a handler, the
/// main thread panics. /// main thread panics.
#[must_use]
pub fn get_port(&self) -> MessagePort { MessagePort(self.sender.clone()) } pub fn get_port(&self) -> MessagePort { MessagePort(self.sender.clone()) }
} }
@@ -181,7 +191,7 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
]), ]),
) )
.unwrap_tree(), .unwrap_tree(),
code: HashMap::new(), code: embed_to_map::<AsynchEmbed>(".orc", i),
prelude: Vec::new(), prelude: Vec::new(),
handlers: handler_table, handlers: handler_table,
} }

View File

@@ -38,14 +38,24 @@ fn none() -> Clause {
/// Define a clause that can be called with a callback and passes the provided /// Define a clause that can be called with a callback and passes the provided
/// values to the callback in order. /// values to the callback in order.
pub fn tuple(data: Vec<ExprInst>) -> Clause { pub fn tuple(data: impl IntoIterator<Item = ExprInst>) -> Clause {
Clause::Lambda { let mut steps = Vec::new();
args: Some(PathSet { let mut body = Clause::LambdaArg.wrap();
next: None, for x in data.into_iter() {
steps: Rc::new(data.iter().map(|_| Side::Left).collect()), steps.push(Side::Left);
}), body = Clause::Apply { f: body, x }.wrap()
body: (data.into_iter()) }
.fold(Clause::LambdaArg.wrap(), |f, x| Clause::Apply { f, x }.wrap()), let path_set = PathSet { next: None, steps: Rc::new(steps) };
Clause::Lambda { args: Some(path_set), body }
}
#[cfg(test)]
mod test {
use crate::systems::codegen::tuple;
#[test]
fn tuple_printer() {
println!("Binary tuple: {}", tuple([0.into(), 1.into()]))
} }
} }
@@ -55,3 +65,9 @@ pub fn call(f: ExprInst, args: impl IntoIterator<Item = ExprInst>) -> Clause {
let x = unwrap_or!(it.by_ref().next(); return f.inspect(Clause::clone)); let x = unwrap_or!(it.by_ref().next(); return f.inspect(Clause::clone));
it.fold(Clause::Apply { f, x }, |acc, x| Clause::Apply { f: acc.wrap(), x }) it.fold(Clause::Apply { f, x }, |acc, x| Clause::Apply { f: acc.wrap(), x })
} }
/// Build an Orchid list from a Rust iterator
pub fn list(items: impl IntoIterator<Item = ExprInst>) -> Clause {
let mut iter = items.into_iter();
orchid_opt(iter.next().map(|it| tuple([it, list(iter).wrap()]).wrap()))
}

View File

@@ -1,19 +1,203 @@
use crate::foreign::cps_box::init_cps; use std::ffi::OsString;
use crate::foreign::InertAtomic; use std::fs::File;
use crate::systems::asynch::MessagePort; use std::io::{BufReader, Read, Write};
use crate::systems::scheduler::SeqScheduler; use std::path::Path;
use crate::{define_fn, OrcString};
use hashbrown::HashMap;
use itertools::Itertools;
use crate::facade::{IntoSystem, System};
use crate::foreign::cps_box::{init_cps, CPSBox};
use crate::foreign::{Atomic, InertAtomic};
use crate::interpreted::{Clause, ExprInst};
use crate::interpreter::HandlerTable;
use crate::systems::codegen::{call, list, orchid_opt, tuple};
use crate::systems::io::wrap_io_error;
use crate::systems::scheduler::{SeqScheduler, SharedHandle};
use crate::systems::stl::Boolean;
use crate::systems::RuntimeError;
use crate::utils::unwrap_or;
use crate::{define_fn, ConstTree, OrcString};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct ReadFile(OrcString); pub struct ReadFileCmd(OrcString);
impl InertAtomic for ReadFile { impl InertAtomic for ReadFileCmd {
fn type_str() -> &'static str { "a readfile command" } fn type_str() -> &'static str { "readfile command" }
} }
pub fn read_file(port: MessagePort, cmd: ReadFile) -> Vec<ExprInst> { #[derive(Debug, Clone)]
let new_file = pub struct ReadDirCmd(OrcString);
impl InertAtomic for ReadDirCmd {
fn type_str() -> &'static str { "readdir command" }
}
#[derive(Debug, Clone)]
pub struct WriteFile {
name: OrcString,
append: bool,
}
impl InertAtomic for WriteFile {
fn type_str() -> &'static str { "writefile command" }
}
#[must_use]
fn read_file(sched: &SeqScheduler, cmd: CPSBox<ReadFileCmd>) -> ExprInst {
let (ReadFileCmd(name), succ, fail, cont) = cmd.unpack3();
let name = name.get_string();
let cancel = sched.run_orphan(
move |_| File::open(name),
|file, _| match file {
Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()],
Ok(f) => {
let source =
SharedHandle::wrap(BufReader::new(Box::new(f) as Box<dyn Read>));
vec![call(succ, [source.atom_exi()]).wrap()]
},
},
);
call(cont, [init_cps(1, cancel).wrap()]).wrap()
}
#[must_use]
fn read_dir(sched: &SeqScheduler, cmd: CPSBox<ReadDirCmd>) -> ExprInst {
let (ReadDirCmd(name), succ, fail, cont) = cmd.unpack3();
let name = name.get_string();
let cancel = sched.run_orphan(
move |_| {
Path::new(&name)
.read_dir()?
.map(|r| r.and_then(|e| Ok((e.file_name(), e.file_type()?.is_dir()))))
.collect()
},
|items: std::io::Result<Vec<(OsString, bool)>>, _| match items {
Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()],
Ok(os_namev) => {
let converted = (os_namev.into_iter())
.map(|(n, d)| {
Ok(tuple([os_str_cls(n)?.wrap(), Boolean(d).atom_exi()]).wrap())
})
.collect::<Result<Vec<_>, Clause>>();
match converted {
Err(e) => vec![call(fail, [e.wrap()]).wrap()],
Ok(names) => vec![call(succ, [list(names).wrap()]).wrap()],
}
},
},
);
call(cont, [init_cps(1, cancel).wrap()]).wrap()
}
#[must_use]
pub fn write_file(sched: &SeqScheduler, cmd: CPSBox<WriteFile>) -> ExprInst {
let (WriteFile { name, append }, succ, fail, cont) = cmd.unpack3();
let name = name.get_string();
let cancel = sched.run_orphan(
move |_| File::options().write(true).append(append).open(name),
|file, _| match file {
Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()],
Ok(f) => {
let handle = SharedHandle::wrap(Box::new(f) as Box<dyn Write>);
vec![call(succ, [handle.atom_exi()]).wrap()]
},
},
);
call(cont, [init_cps(1, cancel).wrap()]).wrap()
}
#[derive(Debug, Clone)]
pub struct InvalidString(OsString);
impl InertAtomic for InvalidString {
fn type_str() -> &'static str { "invalidstring error" }
}
fn os_str_cls(str: OsString) -> Result<Clause, Clause> {
(str.into_string())
.map_err(|e| InvalidString(e).atom_cls())
.map(|s| OrcString::from(s).cls())
} }
define_fn! { define_fn! {
pub OpenFileRead = |x| Ok(init_cps(3, ReadFile(x.downcast()?))) pub IsInvalidString = |x| {
Ok(Boolean(x.downcast::<InvalidString>().is_ok()).atom_cls())
};
pub OpenFileRead = |x| Ok(init_cps(3, ReadFileCmd(x.downcast()?)));
pub ReadDir = |x| Ok(init_cps(3, ReadDirCmd(x.downcast()?)));
pub OpenFileWrite = |x| {
Ok(init_cps(3, WriteFile{ name: x.downcast()?, append: false }))
};
pub OpenFileAppend = |x| {
Ok(init_cps(3, WriteFile{ name: x.downcast()?, append: true }))
};
pub JoinPaths { root: OrcString, sub: OrcString } => {
let res = Path::new(root.as_str())
.join(sub.as_str())
.into_os_string();
os_str_cls(res.clone()).map_err(|_| RuntimeError::ext(
format!("result {res:?} contains illegal characters"),
"joining paths"
))
};
pub PopPath = |x| {
eprintln!("argument is {x}");
let arg = x.downcast::<OrcString>()?;
let full_path = Path::new(arg.as_str());
let parent = unwrap_or! {full_path.parent(); {
return Ok(orchid_opt(None))
}};
let sub = unwrap_or! {full_path.file_name(); {
return Ok(orchid_opt(None))
}};
Ok(orchid_opt(Some(tuple(
[parent.as_os_str(), sub]
.into_iter()
.map(|s| os_str_cls(s.to_owned()).map_err(|_| RuntimeError::ext(
format!("Result {s:?} contains illegal characters"),
"splitting a path"
)))
.map_ok(Clause::wrap)
.collect::<Result<Vec<_>, _>>()?
).wrap())))
}
}
/// A rudimentary system to read and write files.
#[derive(Clone)]
pub struct DirectFS {
scheduler: SeqScheduler,
}
impl DirectFS {
/// Create a new instance of the system.
pub fn new(scheduler: SeqScheduler) -> Self { Self { scheduler } }
}
impl IntoSystem<'static> for DirectFS {
fn into_system(self, i: &crate::Interner) -> System<'static> {
let mut handlers = HandlerTable::new();
let sched = self.scheduler.clone();
handlers.register(move |cmd| Ok(read_file(&sched, *cmd)));
let sched = self.scheduler.clone();
handlers.register(move |cmd| Ok(read_dir(&sched, *cmd)));
let sched = self.scheduler;
handlers.register(move |cmd| Ok(write_file(&sched, *cmd)));
System {
name: ["system", "directfs"].into_iter().map_into().collect(),
code: HashMap::new(),
prelude: Vec::new(),
constants: ConstTree::namespace(
[i.i("system"), i.i("directfs")],
ConstTree::tree([
(i.i("is_invalid_string"), ConstTree::xfn(IsInvalidString)),
(i.i("readfile"), ConstTree::xfn(OpenFileRead)),
(i.i("readdir"), ConstTree::xfn(ReadDir)),
(i.i("writefile"), ConstTree::xfn(OpenFileWrite)),
(i.i("appendfile"), ConstTree::xfn(OpenFileAppend)),
(i.i("join_paths"), ConstTree::xfn(JoinPaths)),
(i.i("pop_path"), ConstTree::xfn(PopPath)),
]),
)
.unwrap_tree(),
handlers,
}
}
} }

View File

@@ -1,2 +1,5 @@
//! A rudimentary system exposing methods for Orchid to interact with the file
//! system. All paths are strings.
mod commands; mod commands;
pub use commands::DirectFS;

View File

@@ -1,10 +1,11 @@
use super::flow::IOCmdHandlePack; use super::flow::IOCmdHandlePack;
use super::instances::{ use super::instances::{
BRead, ReadCmd, SRead, SinkHandle, SourceHandle, WriteCmd, BRead, ReadCmd, SRead, WriteCmd, Sink, Source,
}; };
use crate::foreign::cps_box::init_cps; use crate::foreign::cps_box::init_cps;
use crate::foreign::{Atom, Atomic}; use crate::foreign::{Atom, Atomic};
use crate::representations::OrcString; use crate::representations::OrcString;
use crate::systems::scheduler::SharedHandle;
use crate::systems::stl::Binary; use crate::systems::stl::Binary;
use crate::systems::RuntimeError; use crate::systems::RuntimeError;
use crate::{ast, define_fn, ConstTree, Interner, Primitive}; use crate::{ast, define_fn, ConstTree, Interner, Primitive};
@@ -22,17 +23,13 @@ define_fn! {
cmd: ReadCmd::RBytes(BRead::All), cmd: ReadCmd::RBytes(BRead::All),
handle: x.downcast()? handle: x.downcast()?
})); }));
ReadBytes { ReadBytes { stream: SharedHandle<Source>, n: u64 } => {
stream: SourceHandle, Ok(init_cps(3, IOCmdHandlePack{
n: u64 cmd: ReadCmd::RBytes(BRead::N(n.try_into().unwrap())),
} => Ok(init_cps(3, IOCmdHandlePack{ handle: stream.clone()
cmd: ReadCmd::RBytes(BRead::N(n.try_into().unwrap())), }))
handle: stream.clone() };
})); ReadUntil { stream: SharedHandle<Source>, pattern: u64 } => {
ReadUntil {
stream: SourceHandle,
pattern: u64
} => {
let delim = pattern.try_into().map_err(|_| RuntimeError::ext( let delim = pattern.try_into().map_err(|_| RuntimeError::ext(
"greater than 255".to_string(), "greater than 255".to_string(),
"converting number to byte" "converting number to byte"
@@ -42,20 +39,18 @@ define_fn! {
handle: stream handle: stream
})) }))
}; };
WriteStr { WriteStr { stream: SharedHandle<Sink>, string: OrcString } => {
stream: SinkHandle, Ok(init_cps(3, IOCmdHandlePack {
string: OrcString cmd: WriteCmd::WStr(string.get_string()),
} => Ok(init_cps(3, IOCmdHandlePack { handle: stream.clone(),
cmd: WriteCmd::WStr(string.get_string()), }))
handle: stream.clone(), };
})); WriteBin { stream: SharedHandle<Sink>, bytes: Binary } => {
WriteBin { Ok(init_cps(3, IOCmdHandlePack {
stream: SinkHandle, cmd: WriteCmd::WBytes(bytes),
bytes: Binary handle: stream.clone(),
} => Ok(init_cps(3, IOCmdHandlePack { }))
cmd: WriteCmd::WBytes(bytes), };
handle: stream.clone(),
}));
Flush = |x| Ok(init_cps(3, IOCmdHandlePack { Flush = |x| Ok(init_cps(3, IOCmdHandlePack {
cmd: WriteCmd::Flush, cmd: WriteCmd::Flush,
handle: x.downcast()? handle: x.downcast()?

View File

@@ -9,12 +9,11 @@ use crate::systems::scheduler::{Canceller, SharedHandle};
use crate::systems::stl::Binary; use crate::systems::stl::Binary;
use crate::Literal; use crate::Literal;
/// Any type that we can read controlled amounts of data from
pub type Source = BufReader<Box<dyn Read + Send>>; pub type Source = BufReader<Box<dyn Read + Send>>;
/// Any type that we can write data to
pub type Sink = Box<dyn Write + Send>; pub type Sink = Box<dyn Write + Send>;
pub type SourceHandle = SharedHandle<Source>;
pub type SinkHandle = SharedHandle<Sink>;
/// String reading command /// String reading command
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum SRead { pub enum SRead {
@@ -39,7 +38,7 @@ pub enum ReadCmd {
impl IOCmd for ReadCmd { impl IOCmd for ReadCmd {
type Stream = Source; type Stream = Source;
type Result = ReadResult; type Result = ReadResult;
type Handle = SourceHandle; type Handle = SharedHandle<Source>;
// This is a buggy rule, check manually // This is a buggy rule, check manually
#[allow(clippy::read_zero_byte_vec)] #[allow(clippy::read_zero_byte_vec)]
@@ -82,22 +81,21 @@ impl ReadResult {
pub fn dispatch(self, succ: ExprInst, fail: ExprInst) -> Vec<ExprInst> { pub fn dispatch(self, succ: ExprInst, fail: ExprInst) -> Vec<ExprInst> {
match self { match self {
ReadResult::RBin(_, Err(e)) | ReadResult::RStr(_, Err(e)) => { ReadResult::RBin(_, Err(e)) | ReadResult::RStr(_, Err(e)) => {
vec![call(fail, vec![wrap_io_error(e)]).wrap()] vec![call(fail, [wrap_io_error(e)]).wrap()]
}, },
ReadResult::RBin(_, Ok(bytes)) => { ReadResult::RBin(_, Ok(bytes)) => {
let arg = Binary(Arc::new(bytes)).atom_cls().wrap(); let arg = Binary(Arc::new(bytes)).atom_cls().wrap();
vec![call(succ, vec![arg]).wrap()] vec![call(succ, [arg]).wrap()]
}, },
ReadResult::RStr(_, Ok(text)) => { ReadResult::RStr(_, Ok(text)) => {
vec![call(succ, vec![Literal::Str(text.into()).into()]).wrap()] vec![call(succ, [Literal::Str(text.into()).into()]).wrap()]
}, },
} }
} }
} }
/// Placeholder function for an eventual conversion from [io::Error] to Orchid /// Function to convert [io::Error] to Orchid data
/// data pub fn wrap_io_error(_e: io::Error) -> ExprInst { Literal::Uint(0u64).into() }
fn wrap_io_error(_e: io::Error) -> ExprInst { Literal::Uint(0u64).into() }
/// Writing command (string or binary) /// Writing command (string or binary)
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -109,7 +107,7 @@ pub enum WriteCmd {
impl IOCmd for WriteCmd { impl IOCmd for WriteCmd {
type Stream = Sink; type Stream = Sink;
type Handle = SinkHandle; type Handle = SharedHandle<Sink>;
type Result = WriteResult; type Result = WriteResult;
fn execute( fn execute(

View File

@@ -9,3 +9,4 @@ mod service;
// pub use facade::{io_system, IOStream, IOSystem}; // pub use facade::{io_system, IOStream, IOSystem};
pub use service::{Service, Stream, StreamTable}; pub use service::{Service, Stream, StreamTable};
pub use instances::{wrap_io_error, Source, Sink};

View File

@@ -1,6 +1,7 @@
#[allow(unused)] // for doc #[allow(unused)] // for doc
use std::io::{BufReader, Read, Write}; use std::io::{BufReader, Read, Write};
use itertools::Itertools;
use rust_embed::RustEmbed; use rust_embed::RustEmbed;
use trait_set::trait_set; use trait_set::trait_set;
@@ -69,8 +70,8 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
|stream| (stream, Vec::new()), |stream| (stream, Vec::new()),
); );
match result { match result {
Ok(cancel) => Ok(call(tail, vec![init_cps(1, cancel).wrap()]).wrap()), Ok(cancel) => Ok(call(tail, [init_cps(1, cancel).wrap()]).wrap()),
Err(e) => Ok(call(fail, vec![e.atom_exi()]).wrap()), Err(e) => Ok(call(fail, [e.atom_exi()]).wrap()),
} }
}); });
let scheduler = self.scheduler.clone(); let scheduler = self.scheduler.clone();
@@ -87,8 +88,8 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
|stream| (stream, Vec::new()), |stream| (stream, Vec::new()),
); );
match result { match result {
Ok(cancel) => Ok(call(tail, vec![init_cps(1, cancel).wrap()]).wrap()), Ok(cancel) => Ok(call(tail, [init_cps(1, cancel).wrap()]).wrap()),
Err(e) => Ok(call(fail, vec![e.atom_exi()]).wrap()), Err(e) => Ok(call(fail, [e.atom_exi()]).wrap()),
} }
}); });
let streams = self.global_streams.into_iter().map(|(n, stream)| { let streams = self.global_streams.into_iter().map(|(n, stream)| {
@@ -101,7 +102,7 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
}); });
System { System {
handlers, handlers,
name: vec!["system".to_string(), "io".to_string()], name: ["system", "io"].into_iter().map_into().collect(),
constants: io_bindings(i, streams).unwrap_tree(), constants: io_bindings(i, streams).unwrap_tree(),
code: embed_to_map::<IOEmbed>(".orc", i), code: embed_to_map::<IOEmbed>(".orc", i),
prelude: vec![FileEntry { prelude: vec![FileEntry {

View File

@@ -3,7 +3,7 @@ mod assertion_error;
pub mod asynch; pub mod asynch;
pub mod cast_exprinst; pub mod cast_exprinst;
pub mod codegen; pub mod codegen;
// mod directfs; pub mod directfs;
pub mod io; pub mod io;
mod runtime_error; mod runtime_error;
pub mod scheduler; pub mod scheduler;

View File

@@ -1,14 +1,9 @@
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use crate::foreign::InertAtomic;
/// A single-fire thread-safe boolean flag with relaxed ordering /// A single-fire thread-safe boolean flag with relaxed ordering
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Canceller(Arc<AtomicBool>); pub struct Canceller(Arc<AtomicBool>);
impl InertAtomic for Canceller {
fn type_str() -> &'static str { "a canceller" }
}
impl Canceller { impl Canceller {
/// Create a new canceller /// Create a new canceller

View File

@@ -223,6 +223,26 @@ impl SeqScheduler {
}) })
} }
/// Run an operation asynchronously and then process its result in thread,
/// without queuing on any particular data.
pub fn run_orphan<T: Send + 'static>(
&self,
operation: impl FnOnce(Canceller) -> T + Send + 'static,
handler: impl FnOnce(T, Canceller) -> Vec<ExprInst> + 'static,
) -> Canceller {
let cancelled = Canceller::new();
let canc1 = cancelled.clone();
let opid = self.0.pending.borrow_mut().insert(Box::new(|data, _| {
handler(*data.downcast().expect("This is associated by ID"), canc1)
}));
let canc1 = cancelled.clone();
let mut port = self.0.port.clone();
self.0.pool.submit(Box::new(move || {
port.send(SyncReply { opid, data: Box::new(operation(canc1)) });
}));
cancelled
}
/// Schedule a function that will consume the value. After this the handle is /// Schedule a function that will consume the value. After this the handle is
/// considered sealed and all [SeqScheduler::schedule] calls will fail. /// considered sealed and all [SeqScheduler::schedule] calls will fail.
pub fn seal<T>( pub fn seal<T>(

View File

@@ -125,7 +125,7 @@ expr=x in
)? )?
} }
let (asl, bsl) = bin.0.split_at(i as usize); let (asl, bsl) = bin.0.split_at(i as usize);
Ok(tuple(vec![ Ok(tuple([
Binary(Arc::new(asl.to_vec())).atom_cls().into(), Binary(Arc::new(asl.to_vec())).atom_cls().into(),
Binary(Arc::new(bsl.to_vec())).atom_cls().into(), Binary(Arc::new(bsl.to_vec())).atom_cls().into(),
])) ]))

View File

@@ -1,13 +1,17 @@
import super::(option, fn::*, proc::*, loop::*, bool::*, known::*, num::*) import super::(option, fn::*, proc::*, loop::*, bool::*, known::*, num::*, tuple::*)
const pair := \a.\b. \f. f a b const pair := \a.\b. \f. f a b
-- Constructors -- Constructors
export const cons := \hd.\tl. option::some (pair hd tl) export const cons := \hd.\tl. option::some t[hd, tl]
export const end := option::none export const end := option::none
export const pop := \list.\default.\f.list default \cons.cons f export const pop := \list.\default.\f. do{
cps tuple = list default;
cps head, tail = tuple;
f head tail
}
-- Operators -- Operators
@@ -100,6 +104,25 @@ export const get := \list.\n. (
} }
) )
--[
Map every element to a pair of the index and the original element
]--
export const enumerate := \list. (
recursive r (list, n = 0)
pop list end \head.\tail.
cons t[n, head] $ r tail $ n + 1
)
--[
Turn a list of CPS commands into a sequence. This is achieved by calling every
element on the return value of the next element with the tail passed to it.
The continuation is passed to the very last argument.
]--
export const chain := \list.\cont. loop_over (list) {
cps head, list = pop list cont;
cps head;
}
macro new[...$item, ...$rest:1] =0x2p84=> (cons (...$item) new[...$rest]) macro new[...$item, ...$rest:1] =0x2p84=> (cons (...$item) new[...$rest])
macro new[...$end] =0x1p84=> (cons (...$end) end) macro new[...$end] =0x1p84=> (cons (...$end) end)
macro new[] =0x1p84=> end macro new[] =0x1p84=> end

View File

@@ -1,7 +1,7 @@
export operators[ + - * % / ] export operators[ + - * % / ]
macro ...$a + ...$b =0x2p36=> (add (...$a) (...$b)) macro ...$a + ...$b =0x2p36=> (add (...$a) (...$b))
macro ...$a - ...$b:1 =0x2p36=> (subtract (...$a) (...$b)) macro ...$a:1 - ...$b =0x2p36=> (subtract (...$a) (...$b))
macro ...$a * ...$b =0x1p36=> (multiply (...$a) (...$b)) macro ...$a * ...$b =0x1p36=> (multiply (...$a) (...$b))
macro ...$a % ...$b:1 =0x1p36=> (remainder (...$a) (...$b)) macro ...$a:1 % ...$b =0x1p36=> (remainder (...$a) (...$b))
macro ...$a / ...$b:1 =0x1p36=> (divide (...$a) (...$b)) macro ...$a:1 / ...$b =0x1p36=> (divide (...$a) (...$b))

View File

@@ -6,10 +6,13 @@ import std::bool::*
export ::([==], if, then, else, true, false) export ::([==], if, then, else, true, false)
import std::fn::* import std::fn::*
export ::([$ |> =>], identity, pass, pass2, return) export ::([$ |> =>], identity, pass, pass2, return)
import std::tuple::*
export ::(t)
import std::tuple
import std::list import std::list
import std::map import std::map
import std::option import std::option
export ::(list, map, option) export ::(tuple, list, map, option)
import std::loop::* import std::loop::*
export ::(loop_over, recursive) export ::(loop_over, recursive)

View File

@@ -65,7 +65,7 @@ expr=x in
let mut graphs = s.as_str().graphemes(true); let mut graphs = s.as_str().graphemes(true);
let a = graphs.by_ref().take(i as usize).collect::<String>(); let a = graphs.by_ref().take(i as usize).collect::<String>();
let b = graphs.collect::<String>(); let b = graphs.collect::<String>();
Ok(tuple(vec![a.into(), b.into()])) Ok(tuple([a.into(), b.into()]))
} }
} }

16
src/systems/stl/tuple.orc Normal file
View File

@@ -0,0 +1,16 @@
import super::(known::*, bool::*, num::*)
const discard_args := \n.\value. (
if n == 0 then value
else \_. discard_args (n - 1) value
)
export const pick := \tuple. \i.\n. tuple (
discard_args i \val. discard_args (n - 1 - i) val
)
macro t[...$item, ...$rest:1] =0x2p84=> (\f. t[...$rest] (f (...$item)))
macro t[...$end] =0x1p84=> (\f. f (...$end))
macro t[] =0x1p84=> \f.f
export ::(t)