From 7396078304dcff995d7b7731b88b4385603dbb48 Mon Sep 17 00:00:00 2001 From: Lawrence Bethlenfalvy Date: Sun, 17 Sep 2023 16:37:39 +0100 Subject: [PATCH] Added directfs Added a very rudimentary file I/O system suitable for experimenting with the language further. A better one will be designed when we have sensible error management. --- examples/file-browser/main.orc | 31 +++ src/bin/orcx.rs | 14 +- src/error/mod.rs | 1 - src/error/not_found.rs | 1 + src/error/project_error.rs | 5 + src/facade/environment.rs | 2 + src/facade/process.rs | 7 +- src/facade/system.rs | 1 + src/foreign/atom.rs | 8 + src/foreign/cps_box.rs | 21 +- src/foreign/extern_fn.rs | 3 + src/foreign/inert.rs | 1 + src/interner/mod.rs | 2 - src/interner/monotype.rs | 3 + src/interner/multitype.rs | 28 +-- src/interner/token.rs | 3 + src/interner/traits.rs | 69 ------ src/interpreter/apply.rs | 1 + src/interpreter/handler.rs | 2 + src/parse/comment.rs | 1 + src/parse/context.rs | 3 + src/parse/lexer.rs | 34 +-- src/parse/multiname.rs | 4 + src/parse/name.rs | 2 + src/parse/number.rs | 9 + src/parse/operators.rs | 1 + src/parse/placeholder.rs | 1 + src/parse/sourcefile.rs | 1 + src/parse/stream.rs | 8 +- src/parse/string.rs | 2 + src/pipeline/dealias/alias_cache.rs | 59 ----- src/pipeline/dealias/mod.rs | 1 - src/pipeline/dealias/resolve_aliases.rs | 3 +- src/pipeline/dealias/walk_with_links.rs | 1 + src/pipeline/file_loader.rs | 3 + src/pipeline/import_resolution/alias_map.rs | 55 ----- .../import_resolution/apply_aliases.rs | 86 -------- .../import_resolution/collect_aliases.rs | 47 ---- src/pipeline/import_resolution/decls.rs | 8 - src/pipeline/import_resolution/mod.rs | 8 - .../import_resolution/resolve_imports.rs | 22 -- src/pipeline/mod.rs | 1 - src/pipeline/project_tree/build_tree.rs | 1 + src/representations/ast.rs | 14 ++ src/representations/ast_to_postmacro.rs | 3 + src/representations/const_tree.rs | 8 + src/representations/interpreted.rs | 7 + src/representations/location.rs | 6 + src/representations/namelike.rs | 1 + .../postmacro_to_interpreted.rs | 1 + src/representations/project.rs | 4 + src/representations/sourcefile.rs | 2 + src/representations/string.rs | 12 +- src/representations/tree.rs | 5 +- src/representations/typed.rs | 201 ----------------- src/rule/matcher.rs | 2 + src/rule/matcher_vectree/any_match.rs | 1 + src/rule/matcher_vectree/build.rs | 6 + src/rule/matcher_vectree/scal_match.rs | 2 + src/rule/matcher_vectree/vec_match.rs | 1 + src/rule/prepare_rule.rs | 1 + src/rule/repository.rs | 6 +- src/rule/rule_error.rs | 5 + src/rule/state.rs | 2 + src/rule/update_first_seq.rs | 3 + src/rule/vec_attrs.rs | 1 + src/systems/asynch/async.orc | 5 + src/systems/asynch/system.rs | 12 +- src/systems/codegen.rs | 32 ++- src/systems/directfs/commands.rs | 206 +++++++++++++++++- src/systems/directfs/mod.rs | 5 +- src/systems/io/bindings.rs | 47 ++-- src/systems/io/instances.rs | 20 +- src/systems/io/mod.rs | 1 + src/systems/io/service.rs | 11 +- src/systems/mod.rs | 2 +- src/systems/scheduler/canceller.rs | 5 - src/systems/scheduler/system.rs | 20 ++ src/systems/stl/bin.rs | 2 +- src/systems/stl/list.orc | 29 ++- src/systems/stl/num.orc | 6 +- src/systems/stl/prelude.orc | 5 +- src/systems/stl/str.rs | 2 +- src/systems/stl/tuple.orc | 16 ++ 84 files changed, 563 insertions(+), 721 deletions(-) create mode 100644 examples/file-browser/main.orc delete mode 100644 src/interner/traits.rs delete mode 100644 src/pipeline/dealias/alias_cache.rs delete mode 100644 src/pipeline/import_resolution/alias_map.rs delete mode 
100644 src/pipeline/import_resolution/apply_aliases.rs delete mode 100644 src/pipeline/import_resolution/collect_aliases.rs delete mode 100644 src/pipeline/import_resolution/decls.rs delete mode 100644 src/pipeline/import_resolution/mod.rs delete mode 100644 src/pipeline/import_resolution/resolve_imports.rs delete mode 100644 src/representations/typed.rs create mode 100644 src/systems/asynch/async.orc create mode 100644 src/systems/stl/tuple.orc diff --git a/examples/file-browser/main.orc b/examples/file-browser/main.orc new file mode 100644 index 0000000..b1992f3 --- /dev/null +++ b/examples/file-browser/main.orc @@ -0,0 +1,31 @@ +import system::(io, directfs, async) +import std::proc::* +import std::(to_string, to_uint) + +const folder_view := \path.\next. do{ + cps println $ "Contents of " ++ path; + cps entries = async::block_on $ directfs::readdir path; + cps list::enumerate entries + |> list::map (pass \id. pass \name.\is_dir. ( + println $ to_string id ++ ": " ++ name ++ if is_dir then "/" else "" + )) + |> list::chain; + cps print "select an entry, or .. to move up: "; + cps choice = readln; + let output = if choice == "..\n" + then directfs::pop_path path + |> option::unwrap + |> tuple::pick 0 2 + else ( + to_uint choice + |> (list::get entries) + |> option::unwrap + |> (directfs::join_paths path) + ); + next output +} + +const main := loop_over (path = "/home/lbfalvy/Code/orchid/examples") { + cps path = folder_view path; +} + diff --git a/src/bin/orcx.rs b/src/bin/orcx.rs index b7a25b4..f7b36b0 100644 --- a/src/bin/orcx.rs +++ b/src/bin/orcx.rs @@ -10,7 +10,7 @@ use itertools::Itertools; use orchidlang::facade::{Environment, PreMacro}; use orchidlang::systems::asynch::AsynchSystem; use orchidlang::systems::stl::StlConfig; -use orchidlang::systems::{io, scheduler}; +use orchidlang::systems::{directfs, io, scheduler}; use orchidlang::{ast, interpreted, interpreter, Interner, Sym, VName}; use crate::cli::cmd_prompt; @@ -64,6 +64,7 @@ impl Args { pub fn chk_proj(&self) -> Result<(), String> { self.chk_dir_main() } } +#[must_use] pub fn to_vname(data: &str, i: &Interner) -> VName { data.split("::").map(|s| i.i(s)).collect::>() } @@ -135,16 +136,17 @@ pub fn main() { let main = to_vname(&args.main, &i); let mut asynch = AsynchSystem::new(); let scheduler = scheduler::SeqScheduler::new(&mut asynch); - let io = io::Service::new(scheduler.clone(), [ + let std_streams = [ ("stdin", io::Stream::Source(BufReader::new(Box::new(std::io::stdin())))), ("stdout", io::Stream::Sink(Box::new(std::io::stdout()))), - ("stderr", io::Stream::Sink(Box::new(std::io::stderr()))), - ]); + // ("stderr", io::Stream::Sink(Box::new(std::io::stderr()))), + ]; let env = Environment::new(&i) .add_system(StlConfig { impure: true }) .add_system(asynch) - .add_system(scheduler) - .add_system(io); + .add_system(scheduler.clone()) + .add_system(io::Service::new(scheduler.clone(), std_streams)) + .add_system(directfs::DirectFS::new(scheduler)); let premacro = env.load_dir(&dir, &main).unwrap(); if args.dump_repo { println!("Parsed rules: {}", premacro.repo); diff --git a/src/error/mod.rs b/src/error/mod.rs index c31c5c5..b02e415 100644 --- a/src/error/mod.rs +++ b/src/error/mod.rs @@ -2,7 +2,6 @@ mod import_all; mod no_targets; mod not_exported; -// mod not_found; mod conflicting_roles; mod parse_error_with_tokens; mod project_error; diff --git a/src/error/not_found.rs b/src/error/not_found.rs index d335959..26eabf6 100644 --- a/src/error/not_found.rs +++ b/src/error/not_found.rs @@ -28,6 +28,7 @@ impl NotFound { /// 
/// Basically, if `e` was not produced by the `walk*` methods called on /// `path`. + #[must_use] pub fn from_walk_error( source: &[Tok], prefix: &[Tok], diff --git a/src/error/project_error.rs b/src/error/project_error.rs index 4c9de44..8ec041d 100644 --- a/src/error/project_error.rs +++ b/src/error/project_error.rs @@ -18,19 +18,24 @@ pub struct ErrorPosition { /// code changes pub trait ProjectError { /// A general description of this type of error + #[must_use] fn description(&self) -> &str; /// A formatted message that includes specific parameters + #[must_use] fn message(&self) -> String { self.description().to_string() } /// Code positions relevant to this error. If you don't implement this, you /// must implement [ProjectError::one_position] + #[must_use] fn positions(&self) -> BoxedIter { box_once(ErrorPosition { location: self.one_position(), message: None }) } /// Short way to provide a single location. If you don't implement this, you /// must implement [ProjectError::positions] + #[must_use] fn one_position(&self) -> Location { unimplemented!() } /// Convert the error into an `Rc` to be able to /// handle various errors together + #[must_use] fn rc(self) -> Rc where Self: Sized + 'static, diff --git a/src/facade/environment.rs b/src/facade/environment.rs index 3a71f0b..b7665c5 100644 --- a/src/facade/environment.rs +++ b/src/facade/environment.rs @@ -23,9 +23,11 @@ pub struct Environment<'a> { } impl<'a> Environment<'a> { /// Initialize a new environment + #[must_use] pub fn new(i: &'a Interner) -> Self { Self { i, systems: Vec::new() } } /// Register a new system in the environment + #[must_use] pub fn add_system<'b: 'a>(mut self, is: impl IntoSystem<'b> + 'b) -> Self { self.systems.push(Box::new(is).into_system(self.i)); self diff --git a/src/facade/process.rs b/src/facade/process.rs index dd1cfbd..5a0644a 100644 --- a/src/facade/process.rs +++ b/src/facade/process.rs @@ -33,6 +33,7 @@ impl<'a> Process<'a> { /// Find all unbound constant names in a symbol. This is often useful to /// identify dynamic loading targets. + #[must_use] pub fn unbound_refs(&self, key: Sym) -> Vec<(Sym, Location)> { let mut errors = Vec::new(); let sym = self.symbols.get(&key).expect("symbol must exist"); @@ -48,9 +49,9 @@ impl<'a> Process<'a> { errors } - /// Assert that, unless [interpreted::Clause::Constant]s are created - /// procedurally, a [interpreter::RuntimeError::MissingSymbol] cannot be - /// produced + /// Assert that the code contains no invalid constants. 
This ensures that, + /// unless [interpreted::Clause::Constant]s are created procedurally, + /// a [interpreter::RuntimeError::MissingSymbol] cannot be produced pub fn validate_refs(&self) -> ProjectResult<()> { for key in self.symbols.keys() { if let Some((symbol, location)) = self.unbound_refs(key.clone()).pop() { diff --git a/src/facade/system.rs b/src/facade/system.rs index cdc7b10..d50f96c 100644 --- a/src/facade/system.rs +++ b/src/facade/system.rs @@ -27,6 +27,7 @@ pub struct System<'a> { impl<'a> System<'a> { /// Intern the name of the system so that it can be used as an Orchid /// namespace + #[must_use] pub fn vname(&self, i: &Interner) -> VName { self.name.iter().map(|s| i.i(s)).collect::>() } diff --git a/src/foreign/atom.rs b/src/foreign/atom.rs index 9f66dcb..aa00f40 100644 --- a/src/foreign/atom.rs +++ b/src/foreign/atom.rs @@ -37,8 +37,10 @@ where /// ```ignore /// fn as_any(self: Box) -> Box { self } /// ``` + #[must_use] fn as_any(self: Box) -> Box; /// See [Atomic::as_any], exactly the same but for references + #[must_use] fn as_any_ref(&self) -> &dyn Any; /// Attempt to normalize this value. If it wraps a value, this should report @@ -47,6 +49,7 @@ where fn run(self: Box, ctx: Context) -> AtomicResult; /// Wrap the atom in a clause to be placed in an [AtomicResult]. + #[must_use] fn atom_cls(self) -> Clause where Self: Sized, @@ -55,6 +58,7 @@ where } /// Wrap the atom in a new expression instance to be placed in a tree + #[must_use] fn atom_exi(self) -> ExprInst where Self: Sized, @@ -73,10 +77,12 @@ where pub struct Atom(pub Box); impl Atom { /// Wrap an [Atomic] in a type-erased box + #[must_use] pub fn new(data: T) -> Self { Self(Box::new(data) as Box) } /// Get the contained data + #[must_use] pub fn data(&self) -> &dyn Atomic { self.0.as_ref() as &dyn Atomic } /// Attempt to downcast contained data to a specific type pub fn try_cast(self) -> Result { @@ -86,8 +92,10 @@ impl Atom { } } /// Test the type of the contained data without downcasting + #[must_use] pub fn is(&self) -> bool { self.data().as_any_ref().is::() } /// Downcast contained data, panic if it isn't the specified type + #[must_use] pub fn cast(self) -> T { *self.0.as_any().downcast().expect("Type mismatch on Atom::cast") } diff --git a/src/foreign/cps_box.rs b/src/foreign/cps_box.rs index 0ff4c6c..90e4f3d 100644 --- a/src/foreign/cps_box.rs +++ b/src/foreign/cps_box.rs @@ -25,6 +25,7 @@ struct CPSFn { pub payload: T, } impl CPSFn { + #[must_use] fn new(argc: usize, payload: T) -> Self { debug_assert!( argc > 0, @@ -55,37 +56,25 @@ pub struct CPSBox { pub continuations: Vec, } impl CPSBox { - /// Assert that the command was instantiated with the correct number of - /// possible continuations. This is decided by the native bindings, not user - /// code, therefore this error may be uncovered by usercode but can never be - /// produced at will. 
- pub fn assert_count(&self, expect: usize) { - let real = self.continuations.len(); - debug_assert!( - real == expect, - "Tried to read {expect} argument(s) but {real} were provided for {:?}", - self.payload - ) - } /// Unpack the wrapped command and the continuation + #[must_use] pub fn unpack1(self) -> (T, ExprInst) { - self.assert_count(1); let [cont]: [ExprInst; 1] = self.continuations.try_into().expect("size checked"); (self.payload, cont) } /// Unpack the wrapped command and 2 continuations (usually an async and a /// sync) + #[must_use] pub fn unpack2(self) -> (T, ExprInst, ExprInst) { - self.assert_count(2); let [c1, c2]: [ExprInst; 2] = self.continuations.try_into().expect("size checked"); (self.payload, c1, c2) } /// Unpack the wrapped command and 3 continuations (usually an async success, /// an async fail and a sync) + #[must_use] pub fn unpack3(self) -> (T, ExprInst, ExprInst, ExprInst) { - self.assert_count(3); let [c1, c2, c3]: [ExprInst; 3] = self.continuations.try_into().expect("size checked"); (self.payload, c1, c2, c3) @@ -97,6 +86,7 @@ impl InertAtomic for CPSBox { } /// Like [init_cps] but wrapped in a [ConstTree] for init-time usage +#[must_use] pub fn const_cps(argc: usize, payload: T) -> ConstTree { ConstTree::xfn(CPSFn::new(argc, payload)) } @@ -106,6 +96,7 @@ pub fn const_cps(argc: usize, payload: T) -> ConstTree { /// /// This function is meant to be used in an external function defined with /// [crate::define_fn]. For usage in a [ConstTree], see [mk_const] +#[must_use] pub fn init_cps(argc: usize, payload: T) -> Clause { CPSFn::new(argc, payload).xfn_cls() } diff --git a/src/foreign/extern_fn.rs b/src/foreign/extern_fn.rs index fd4f6a0..41972bd 100644 --- a/src/foreign/extern_fn.rs +++ b/src/foreign/extern_fn.rs @@ -16,6 +16,7 @@ pub type XfnResult = Result>; /// Errors produced by external code pub trait ExternError: Display { /// Convert into trait object + #[must_use] fn into_extern(self) -> Rc where Self: 'static + Sized, @@ -37,6 +38,7 @@ impl Error for dyn ExternError {} /// these are also external functions. pub trait ExternFn: DynClone { /// Display name of the function + #[must_use] fn name(&self) -> &str; /// Combine the function with an argument to produce a new clause fn apply(self: Box, arg: ExprInst, ctx: Context) -> XfnResult; @@ -45,6 +47,7 @@ pub trait ExternFn: DynClone { self.name().hash(&mut state) } /// Wrap this function in a clause to be placed in an [AtomicResult]. + #[must_use] fn xfn_cls(self) -> Clause where Self: Sized + 'static, diff --git a/src/foreign/inert.rs b/src/foreign/inert.rs index c425421..07e90e3 100644 --- a/src/foreign/inert.rs +++ b/src/foreign/inert.rs @@ -19,6 +19,7 @@ use crate::Primitive; /// provided in argument lists. pub trait InertAtomic: Debug + Clone + 'static { /// Typename to be shown in the error when a conversion from [ExprInst] fails + #[must_use] fn type_str() -> &'static str; /// Proxies to [Responder] so that you don't have to implmeent it manually if /// you need it, but behaves exactly as the default implementation. 
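Taken together, the cps_box and inert changes above settle on a fixed shape for system commands: a plain payload type wrapped by init_cps or const_cps on the way into Orchid code, and taken apart again with unpack1/unpack2/unpack3 once the interpreter hands the command back. A minimal sketch of that round trip, assuming the trait bounds elided by the diff are the usual Debug + Clone; the ReadFile command and both function names are illustrative, not part of the patch.

// Hypothetical command payload; only CPSBox, InertAtomic, init_cps and unpack2
// are taken from the modules patched above.
#[derive(Debug, Clone)]
struct ReadFile { path: String }
impl InertAtomic for ReadFile {
  fn type_str() -> &'static str { "ReadFile" }
}

// Built inside an extern function; a command exposed directly from a constant
// tree would use const_cps(2, ..) instead to obtain a ConstTree leaf.
fn read_file_cmd(path: String) -> Clause {
  init_cps(2, ReadFile { path })
}

// Interpreter-side handler: split the boxed command into its payload and its
// two continuations (usually one async and one sync, per the docs above).
fn split_read_file(cmd: CPSBox<ReadFile>) -> (ReadFile, ExprInst, ExprInst) {
  cmd.unpack2()
}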
diff --git a/src/interner/mod.rs b/src/interner/mod.rs index 6e276e1..c04bbdf 100644 --- a/src/interner/mod.rs +++ b/src/interner/mod.rs @@ -5,9 +5,7 @@ mod monotype; mod multitype; mod token; -// mod traits; pub use monotype::TypedInterner; pub use multitype::Interner; pub use token::Tok; -// pub use traits::{DisplayBundle, InternedDisplay, InternedInto}; diff --git a/src/interner/monotype.rs b/src/interner/monotype.rs index 1524621..6ae83bd 100644 --- a/src/interner/monotype.rs +++ b/src/interner/monotype.rs @@ -15,11 +15,13 @@ pub struct TypedInterner { } impl TypedInterner { /// Create a fresh interner instance + #[must_use] pub fn new() -> Rc { Rc::new(Self { tokens: RefCell::new(HashMap::new()) }) } /// Intern an object, returning a token + #[must_use] pub fn i>( self: &Rc, q: &Q, @@ -42,6 +44,7 @@ impl TypedInterner { } /// Helper function to compute hashes outside a hashmap +#[must_use] fn compute_hash( hash_builder: &impl BuildHasher, key: &(impl Hash + ?Sized), diff --git a/src/interner/multitype.rs b/src/interner/multitype.rs index a02adaf..5771e8e 100644 --- a/src/interner/multitype.rs +++ b/src/interner/multitype.rs @@ -8,7 +8,6 @@ use hashbrown::HashMap; use super::monotype::TypedInterner; use super::token::Tok; -// use super::InternedDisplay; /// A collection of interners based on their type. Allows to intern any object /// that implements [ToOwned]. Objects of the same type are stored together in a @@ -18,9 +17,11 @@ pub struct Interner { } impl Interner { /// Create a new interner + #[must_use] pub fn new() -> Self { Self { interners: RefCell::new(HashMap::new()) } } /// Intern something + #[must_use] pub fn i(&self, q: &Q) -> Tok where Q::Owned: 'static + Eq + Hash + Clone + Borrow, @@ -31,32 +32,10 @@ impl Interner { } /// Fully resolve a list of interned things. + #[must_use] pub fn extern_all(s: &[Tok]) -> Vec { s.iter().map(|t| (**t).clone()).collect() } - - // /// A variant of `unwrap` using [InternedDisplay] to circumvent `unwrap`'s - // /// dependencyon [Debug]. For clarity, [expect] should be preferred. - // pub fn unwrap(&self, result: Result) -> T { - // result.unwrap_or_else(|e| { - // println!("Unwrapped Error: {}", e.bundle(self)); - // panic!("Unwrapped an error"); - // }) - // } - - // /// A variant of `expect` using [InternedDisplay] to circumvent `expect`'s - // /// depeendency on [Debug]. - // pub fn expect( - // &self, - // result: Result, - // msg: &str, - // ) -> T { - // result.unwrap_or_else(|e| { - // println!("Expectation failed: {msg}"); - // println!("Error: {}", e.bundle(self)); - // panic!("Expected an error"); - // }) - // } } impl Default for Interner { @@ -64,6 +43,7 @@ impl Default for Interner { } /// Get or create an interner for a given type. +#[must_use] fn get_interner( interners: &mut RefMut>>, ) -> Rc> { diff --git a/src/interner/token.rs b/src/interner/token.rs index b9529e3..b4b09bd 100644 --- a/src/interner/token.rs +++ b/src/interner/token.rs @@ -18,15 +18,18 @@ pub struct Tok { } impl Tok { /// Create a new token. 
Used exclusively by the interner + #[must_use] pub(crate) fn new(data: Rc, interner: Weak>) -> Self { Self { data, interner } } /// Take the ID number out of a token + #[must_use] pub fn id(&self) -> NonZeroUsize { ((self.data.as_ref() as *const T as usize).try_into()) .expect("Pointer can always be cast to nonzero") } /// Cast into usize + #[must_use] pub fn usize(&self) -> usize { self.id().into() } /// pub fn assert_comparable(&self, other: &Self) { diff --git a/src/interner/traits.rs b/src/interner/traits.rs deleted file mode 100644 index 9c4d01a..0000000 --- a/src/interner/traits.rs +++ /dev/null @@ -1,69 +0,0 @@ -use core::fmt::{self, Display, Formatter}; -use core::ops::Deref; -use std::rc::Rc; - -use crate::interner::Interner; - -/// A variant of [std::fmt::Display] for objects that contain interned -/// strings and therefore can only be stringified in the presence of a -/// string interner -/// -/// The functions defined here are suffixed to distinguish them from -/// the ones in Display and ToString respectively, because Rust can't -/// identify functions based on arity -pub trait InternedDisplay { - /// formats the value using the given formatter and string interner - fn fmt_i( - &self, - f: &mut std::fmt::Formatter<'_>, - i: &Interner, - ) -> std::fmt::Result; - - /// Converts the value to a string to be displayed - fn to_string_i(&self, i: &Interner) -> String { - self.bundle(i).to_string() - } - - /// Combine with an interner to implement [Display] - fn bundle<'a>(&'a self, interner: &'a Interner) -> DisplayBundle<'a, Self> { - DisplayBundle { interner, data: self } - } -} - -// Special loophole for Rc -impl InternedDisplay for Rc -where - T: InternedDisplay, -{ - fn fmt_i(&self, f: &mut Formatter<'_>, i: &Interner) -> fmt::Result { - self.deref().fmt_i(f, i) - } -} - -/// A reference to an [InternedDisplay] type and an [Interner] tied together -/// to implement [Display] -pub struct DisplayBundle<'a, T: InternedDisplay + ?Sized> { - interner: &'a Interner, - data: &'a T, -} - -impl<'a, T: InternedDisplay + ?Sized> Display for DisplayBundle<'a, T> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - self.data.fmt_i(f, self.interner) - } -} - -/// Conversions that are possible in the presence of an interner -/// -/// Essentially, this allows to define abstractions over interned and -/// non-interned versions of a type and convert between them -pub trait InternedInto { - /// Execute the conversion - fn into_i(self, i: &Interner) -> U; -} - -impl, U> InternedInto for T { - fn into_i(self, _i: &Interner) -> U { - self.into() - } -} diff --git a/src/interpreter/apply.rs b/src/interpreter/apply.rs index 041cfae..340b442 100644 --- a/src/interpreter/apply.rs +++ b/src/interpreter/apply.rs @@ -48,6 +48,7 @@ fn map_at( /// Replace the [Clause::LambdaArg] placeholders at the ends of the [PathSet] /// with the value in the body. Note that a path may point to multiple /// placeholders. 
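The interner changes above (monotype.rs and multitype.rs) keep one Rc per distinct value, so tokens made from equal inputs share a backing allocation and compare by pointer identity. A short sketch of the round trip, assuming the generic parameters elided by the diff are the usual Tok<String> / Vec<String>:

let i = Interner::new();
let a = i.i("directfs");
let b = i.i("directfs");
// Both tokens point at the same interned string, so their ids coincide.
assert_eq!(a.id(), b.id());
// extern_all clones the interned values back out into plain owned data.
assert_eq!(
  Interner::extern_all(&[a, b]),
  vec!["directfs".to_string(), "directfs".to_string()]
);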
+#[must_use] fn substitute(paths: &PathSet, value: Clause, body: ExprInst) -> ExprInst { let PathSet { steps, next } = paths; unwrap_always(map_at(steps, body, &mut |checkpoint| -> Always { diff --git a/src/interpreter/handler.rs b/src/interpreter/handler.rs index 3ee238f..47e5b20 100644 --- a/src/interpreter/handler.rs +++ b/src/interpreter/handler.rs @@ -21,6 +21,7 @@ pub struct HandlerTable<'a> { } impl<'a> HandlerTable<'a> { /// Create a new [HandlerTable] + #[must_use] pub fn new() -> Self { Self { handlers: HashMap::new() } } /// Add a handler function to interpret a type of atom and decide what happens @@ -46,6 +47,7 @@ impl<'a> HandlerTable<'a> { } /// Combine two non-overlapping handler sets + #[must_use] pub fn combine(mut self, other: Self) -> Self { for (key, value) in other.handlers { let prev = self.handlers.insert(key, value); diff --git a/src/parse/comment.rs b/src/parse/comment.rs index 741b7af..b28d2e5 100644 --- a/src/parse/comment.rs +++ b/src/parse/comment.rs @@ -4,6 +4,7 @@ pub use chumsky::{self, Parser}; use super::decls::SimpleParser; /// Parses Lua-style comments +#[must_use] pub fn comment_parser() -> impl SimpleParser { choice(( just("--[").ignore_then(take_until(just("]--").ignored())), diff --git a/src/parse/context.rs b/src/parse/context.rs index b60ffa9..7e08c78 100644 --- a/src/parse/context.rs +++ b/src/parse/context.rs @@ -8,8 +8,11 @@ use crate::{Tok, VName}; /// Hiding type parameters in associated types allows for simpler /// parser definitions pub trait Context: Clone { + #[must_use] fn ops(&self) -> &[Tok]; + #[must_use] fn file(&self) -> Rc; + #[must_use] fn interner(&self) -> &Interner; } diff --git a/src/parse/lexer.rs b/src/parse/lexer.rs index 35707cf..ee1a5a6 100644 --- a/src/parse/lexer.rs +++ b/src/parse/lexer.rs @@ -25,10 +25,12 @@ pub struct Entry { } impl Entry { /// Checks if the lexeme is a comment or line break + #[must_use] pub fn is_filler(&self) -> bool { matches!(self.lexeme, Lexeme::Comment(_) | Lexeme::BR) } + #[must_use] pub fn is_keyword(&self) -> bool { matches!( self.lexeme, @@ -40,12 +42,15 @@ impl Entry { ) } + #[must_use] pub fn location(&self) -> Location { self.location.clone() } + #[must_use] pub fn range(&self) -> Range { self.location.range().expect("An Entry can only have a known location") } + #[must_use] pub fn file(&self) -> Rc { self.location.file().expect("An Entry can only have a range location") } @@ -57,30 +62,6 @@ impl Display for Entry { } } -// impl From for (Lexeme, Range) { -// fn from(ent: Entry) -> Self { -// (ent.lexeme.clone(), ent.range()) -// } -// } - -// impl Span for Entry { -// type Context = (Lexeme, Rc>); -// type Offset = usize; - -// fn context(&self) -> Self::Context { -// (self.lexeme.clone(), self.file()) -// } -// fn start(&self) -> Self::Offset { -// self.range().start() -// } -// fn end(&self) -> Self::Offset { -// self.range().end() -// } -// fn new((lexeme, file): Self::Context, range: Range) -> Self { -// Self { lexeme, location: Location::Range { file, range } } -// } -// } - impl AsRef for Entry { fn as_ref(&self) -> &Location { &self.location } } @@ -159,12 +140,14 @@ impl Display for Lexeme { } impl Lexeme { + #[must_use] pub fn rule(prio: impl Into) -> Self { Lexeme::Arrow( NotNan::new(prio.into()).expect("Rule priority cannot be NaN"), ) } + #[must_use] pub fn parser>( self, ) -> impl Parser + Clone { @@ -181,10 +164,12 @@ impl Display for LexedText { } } +#[must_use] fn paren_parser(lp: char, rp: char) -> impl SimpleParser { 
just(lp).to(Lexeme::LP(lp)).or(just(rp).to(Lexeme::RP(lp))) } +#[must_use] pub fn literal_parser<'a>( ctx: impl Context + 'a, ) -> impl SimpleParser + 'a { @@ -199,6 +184,7 @@ pub fn literal_parser<'a>( pub static BASE_OPS: &[&str] = &[",", ".", "..", "...", "*"]; +#[must_use] pub fn lexer<'a>( ctx: impl Context + 'a, source: Rc, diff --git a/src/parse/multiname.rs b/src/parse/multiname.rs index d3fa9cf..765506d 100644 --- a/src/parse/multiname.rs +++ b/src/parse/multiname.rs @@ -16,19 +16,23 @@ struct Subresult { location: Location, } impl Subresult { + #[must_use] fn new_glob(location: Location) -> Self { Self { glob: true, deque: VecDeque::new(), location } } + #[must_use] fn new_named(name: Tok, location: Location) -> Self { Self { location, glob: false, deque: VecDeque::from([name]) } } + #[must_use] fn push_front(mut self, name: Tok) -> Self { self.deque.push_front(name); self } + #[must_use] fn finalize(self) -> Import { let Self { mut deque, glob, location } = self; debug_assert!(glob || !deque.is_empty(), "The constructors forbid this"); diff --git a/src/parse/name.rs b/src/parse/name.rs index 66fc7ae..14ff57f 100644 --- a/src/parse/name.rs +++ b/src/parse/name.rs @@ -42,6 +42,7 @@ pub static NOT_NAME_CHAR: &[char] = &[ /// TODO: `.` could possibly be parsed as an operator in some contexts. /// This operator is very common in maths so it's worth a try. /// Investigate. +#[must_use] pub fn anyop_parser<'a>() -> impl SimpleParser + 'a { filter(move |c| { !NOT_NAME_CHAR.contains(c) @@ -57,6 +58,7 @@ pub fn anyop_parser<'a>() -> impl SimpleParser + 'a { /// Parse an operator or name. Failing both, parse everything up to /// the next whitespace or blacklisted character as a new operator. +#[must_use] pub fn name_parser<'a>( ops: &[impl AsRef + Clone], ) -> impl SimpleParser + 'a { diff --git a/src/parse/number.rs b/src/parse/number.rs index 89917fe..117a5dc 100644 --- a/src/parse/number.rs +++ b/src/parse/number.rs @@ -13,6 +13,7 @@ fn assert_not_digit(base: u32, c: char) { /// Parse an arbitrarily grouped sequence of digits starting with an underscore. /// /// TODO: this should use separated_by and parse the leading group too +#[must_use] fn separated_digits_parser(base: u32) -> impl SimpleParser { just('_') .ignore_then(text::digits(base)) @@ -23,6 +24,7 @@ fn separated_digits_parser(base: u32) -> impl SimpleParser { /// parse a grouped uint /// /// Not to be confused with [int_parser] which does a lot more +#[must_use] fn uint_parser(base: u32) -> impl SimpleParser { text::int(base).then(separated_digits_parser(base)).map( move |(s1, s2): (String, String)| { @@ -33,6 +35,7 @@ fn uint_parser(base: u32) -> impl SimpleParser { /// parse exponent notation, or return 0 as the default exponent. /// The exponent is always in decimal. +#[must_use] fn pow_parser() -> impl SimpleParser { choice(( just('p').ignore_then(text::int(10)).map(|s: String| s.parse().unwrap()), @@ -68,6 +71,7 @@ fn nat2f(base: u64) -> impl Fn((NotNan, i32)) -> NotNan { } /// parse an uint from exponential notation (panics if 'p' is a digit in base) +#[must_use] fn pow_uint_parser(base: u32) -> impl SimpleParser { assert_not_digit(base, 'p'); uint_parser(base).then(pow_parser()).map(nat2u(base.into())) @@ -76,6 +80,7 @@ fn pow_uint_parser(base: u32) -> impl SimpleParser { /// parse an uint from a base determined by its prefix or lack thereof /// /// Not to be confused with [uint_parser] which is a component of it. 
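To make the grammar above concrete: int_parser, defined just below, accepts an optional base prefix, underscore-separated digit groups, and an optional p exponent. A usage sketch, assuming chumsky's Parser::parse entry point; the literal is illustrative.

// "0b1_0000" is a binary literal with one underscore-separated digit group -> 16.
let n: u64 = int_parser().parse("0b1_0000").expect("valid integer literal");
assert_eq!(n, 16);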
+#[must_use] pub fn int_parser() -> impl SimpleParser { choice(( just("0b").ignore_then(pow_uint_parser(2)), @@ -86,6 +91,7 @@ pub fn int_parser() -> impl SimpleParser { } /// parse a float from dot notation +#[must_use] fn dotted_parser(base: u32) -> impl SimpleParser> { uint_parser(base) .then( @@ -107,6 +113,7 @@ fn dotted_parser(base: u32) -> impl SimpleParser> { } /// parse a float from dotted and optionally also exponential notation +#[must_use] fn pow_float_parser(base: u32) -> impl SimpleParser> { assert_not_digit(base, 'p'); dotted_parser(base).then(pow_parser()).map(nat2f(base.into())) @@ -114,6 +121,7 @@ fn pow_float_parser(base: u32) -> impl SimpleParser> { /// parse a float with dotted and optionally exponential notation from a base /// determined by its prefix +#[must_use] pub fn float_parser() -> impl SimpleParser> { choice(( just("0b").ignore_then(pow_float_parser(2)), @@ -124,6 +132,7 @@ pub fn float_parser() -> impl SimpleParser> { .labelled("float") } +#[must_use] pub fn print_nat16(num: NotNan) -> String { let exp = num.log(16.0).floor(); let man = num / 16_f64.powf(exp); diff --git a/src/parse/operators.rs b/src/parse/operators.rs index 1a31754..a140b06 100644 --- a/src/parse/operators.rs +++ b/src/parse/operators.rs @@ -2,6 +2,7 @@ use chumsky::prelude::*; use super::decls::SimpleParser; +#[must_use] pub fn operators_parser( f: impl Fn(String) -> T, ) -> impl SimpleParser> { diff --git a/src/parse/placeholder.rs b/src/parse/placeholder.rs index c14da2f..947a995 100644 --- a/src/parse/placeholder.rs +++ b/src/parse/placeholder.rs @@ -6,6 +6,7 @@ use super::decls::SimpleParser; use super::number::int_parser; use crate::ast::{PHClass, Placeholder}; +#[must_use] pub fn placeholder_parser( ctx: impl Context, ) -> impl SimpleParser { diff --git a/src/parse/sourcefile.rs b/src/parse/sourcefile.rs index a6a9deb..d5a7cc3 100644 --- a/src/parse/sourcefile.rs +++ b/src/parse/sourcefile.rs @@ -287,6 +287,7 @@ fn vec_to_single( } } +#[must_use] pub fn expr_slice_location(v: &[impl AsRef]) -> Location { v.first() .map(|l| l.as_ref().clone().to(v.last().unwrap().as_ref().clone())) diff --git a/src/parse/stream.rs b/src/parse/stream.rs index 52781fd..13d0f4d 100644 --- a/src/parse/stream.rs +++ b/src/parse/stream.rs @@ -5,6 +5,7 @@ use crate::Location; /// Represents a slice which may or may not contain items, and a fallback entry /// used for error reporting whenever the errant stream is empty. +#[must_use = "streams represent segments of code that must be parsed"] #[derive(Clone, Copy)] pub struct Stream<'a> { pub fallback: &'a Entry, @@ -42,6 +43,7 @@ impl<'a> Stream<'a> { }) } + #[must_use] pub fn location(self) -> Location { self.data.first().map_or_else( || self.fallback.location(), @@ -98,12 +100,6 @@ impl<'a> Stream<'a> { } } -// impl<'a> From<(&'a Entry, &'a [Entry])> for Stream<'a> { -// fn from((fallback, data): (&'a Entry, &'a [Entry])) -> Self { -// Self::new(fallback, data) -// } -// } - pub fn skip_parenthesized<'a>( it: impl Iterator, ) -> impl Iterator { diff --git a/src/parse/string.rs b/src/parse/string.rs index 5230c88..371b22c 100644 --- a/src/parse/string.rs +++ b/src/parse/string.rs @@ -4,6 +4,7 @@ use chumsky::{self, Parser}; use super::decls::SimpleParser; /// Parses a text character that is not the specified delimiter +#[must_use] fn text_parser(delim: char) -> impl SimpleParser { // Copied directly from Chumsky's JSON example. 
let escape = just('\\').ignore_then( @@ -35,6 +36,7 @@ fn text_parser(delim: char) -> impl SimpleParser { } /// Parse a string between double quotes +#[must_use] pub fn str_parser() -> impl SimpleParser { just('"') .ignore_then( diff --git a/src/pipeline/dealias/alias_cache.rs b/src/pipeline/dealias/alias_cache.rs deleted file mode 100644 index 5835966..0000000 --- a/src/pipeline/dealias/alias_cache.rs +++ /dev/null @@ -1,59 +0,0 @@ -use std::slice; - -use chumsky::primitive::Container; -use hashbrown::HashMap; - -use crate::representations::project::{ProjectMod, ItemKind, ProjectEntry}; -use crate::tree::ModMember; -use crate::utils::{pushed, unwrap_or}; -use crate::{ProjectTree, VName, Tok, NameLike}; - -use super::walk_with_links::{walk_with_links, Target}; - -pub struct AliasCache { - data: HashMap>, Option>>>, -} -impl AliasCache { - pub fn new() -> Self { - Self { data: HashMap::new() } - } - - /// Finds the absolute nsname corresponding to the given name in the given - /// context, if it's imported. If the name is defined locally, returns None - /// to avoid allocating several vectors for every local variable. - pub fn resolv_name<'a>( - &'a mut self, - root: &ProjectMod, - location: &[Tok], - name: Tok - ) -> Option<&'a [Tok]> { - let full_path = pushed(location, name); - if let Some(result) = self.data.get(&full_path) { - return result.as_deref(); - } - let (ent, finalp) = walk_with_links(root, location.iter().cloned()) - .expect("This path should be valid"); - let m = unwrap_or!{ent => Target::Mod; panic!("Must be a module")}; - let result = m.extra.imports_from.get(&name).map(|next| { - self.resolv_name(root, &next, name).unwrap_or(&next) - }); - self.data.insert(full_path, result.map(|s| s.to_vec())); - return result; - } - - /// Find the absolute target of a - pub fn resolv_vec<'a>( - &'a mut self, - root: &ProjectMod, - modname: &[Tok], - vname: &[Tok], - ) -> Option<&'a [Tok]> { - let (name, ns) = vname.split_last().expect("name cannot be empty"); - if ns.is_empty() { - self.resolv_name(modname, name) - } else { - let origin = self.resolv_vec(modname, ns)?; - self.resolv_name(origin, name) - } - } -} diff --git a/src/pipeline/dealias/mod.rs b/src/pipeline/dealias/mod.rs index d8cef8e..b006b5f 100644 --- a/src/pipeline/dealias/mod.rs +++ b/src/pipeline/dealias/mod.rs @@ -1,4 +1,3 @@ -// mod alias_cache; mod resolve_aliases; mod walk_with_links; diff --git a/src/pipeline/dealias/resolve_aliases.rs b/src/pipeline/dealias/resolve_aliases.rs index 2b25568..f150741 100644 --- a/src/pipeline/dealias/resolve_aliases.rs +++ b/src/pipeline/dealias/resolve_aliases.rs @@ -11,6 +11,7 @@ use crate::tree::{ModEntry, ModMember, Module}; use crate::utils::pure_push::pushed; use crate::{Interner, ProjectTree, Tok, VName}; +#[must_use] fn resolve_aliases_rec( root: &ProjectMod, module: &ProjectMod, @@ -26,7 +27,6 @@ fn resolve_aliases_rec( let full_name = (module.extra.path.iter()).chain(n.iter()).cloned(); match walk_with_links(root, full_name, false) { Ok(rep) => Some(rep.abs_path), - // Ok(_) => None, Err(e) => { let leftovers = e.tail.collect::>(); if !leftovers.is_empty() { @@ -87,6 +87,7 @@ fn resolve_aliases_rec( } } +#[must_use] pub fn resolve_aliases( project: ProjectTree, updated: &impl Fn(&[Tok]) -> bool, diff --git a/src/pipeline/dealias/walk_with_links.rs b/src/pipeline/dealias/walk_with_links.rs index 08ba53a..6a56332 100644 --- a/src/pipeline/dealias/walk_with_links.rs +++ b/src/pipeline/dealias/walk_with_links.rs @@ -12,6 +12,7 @@ pub enum Target<'a, N: NameLike> { Leaf(&'a 
ProjectItem), } +#[must_use = "this is the sole product of this function"] pub struct WalkReport<'a, N: NameLike> { pub target: Target<'a, N>, pub abs_path: VName, diff --git a/src/pipeline/file_loader.rs b/src/pipeline/file_loader.rs index 39b41fd..8221b23 100644 --- a/src/pipeline/file_loader.rs +++ b/src/pipeline/file_loader.rs @@ -87,6 +87,7 @@ pub fn load_file(root: &Path, path: &[Tok]) -> IOResult { } /// Generates a cached file loader for a directory +#[must_use] pub fn mk_dir_cache(root: PathBuf) -> Cache<'static, VName, IOResult> { Cache::new(move |vname: VName, _this| load_file(&root, &vname)) } @@ -125,6 +126,7 @@ pub fn load_embed(path: &str, ext: &str) -> IOResult { } /// Generates a cached file loader for a [RustEmbed] +#[must_use] pub fn mk_embed_cache( ext: &str, ) -> Cache<'_, Vec, IOResult> { @@ -136,6 +138,7 @@ pub fn mk_embed_cache( /// Load all files from an embed and convert them into a map usable in a /// [System] +#[must_use] pub fn embed_to_map( suffix: &str, i: &Interner, diff --git a/src/pipeline/import_resolution/alias_map.rs b/src/pipeline/import_resolution/alias_map.rs deleted file mode 100644 index 1cdf559..0000000 --- a/src/pipeline/import_resolution/alias_map.rs +++ /dev/null @@ -1,55 +0,0 @@ -use std::hash::Hash; - -use hashbrown::{HashMap, HashSet}; - -use crate::{interner::Tok, VName}; - -#[derive(Clone, Debug, Default)] -pub struct AliasMap { - pub targets: HashMap, - pub aliases: HashMap>, -} -impl AliasMap { - pub fn new() -> Self { - Self::default() - } - - pub fn link(&mut self, alias: VName, target: VName) { - let prev = self.targets.insert(alias.clone(), target.clone()); - debug_assert!(prev.is_none(), "Alias already has a target"); - multimap_entry(&mut self.aliases, &target).insert(alias.clone()); - // Remove aliases of the alias - if let Some(alts) = self.aliases.remove(&alias) { - for alt in alts { - // Assert that this step has always been done in the past - debug_assert!( - self.aliases.get(&alt).map(HashSet::is_empty).unwrap_or(true), - "Alias set of alias not empty" - ); - let alt_target = self.targets.insert(alt.clone(), target.clone()); - debug_assert!( - alt_target.as_ref() == Some(&alias), - "Name not target of its own alias" - ); - multimap_entry(&mut self.aliases, &alias).insert(alt); - } - } - } - - pub fn resolve(&self, alias: &[Tok]) -> Option<&VName> { - self.targets.get(alias) - } -} - -/// find or create the set belonging to the given key in the given -/// map-to-set (aka. 
multimap) -fn multimap_entry<'a, K: Eq + Hash + Clone, V>( - map: &'a mut HashMap>, - key: &'_ K, -) -> &'a mut HashSet { - map - .raw_entry_mut() - .from_key(key) - .or_insert_with(|| (key.clone(), HashSet::new())) - .1 -} diff --git a/src/pipeline/import_resolution/apply_aliases.rs b/src/pipeline/import_resolution/apply_aliases.rs deleted file mode 100644 index 5a4f4e4..0000000 --- a/src/pipeline/import_resolution/apply_aliases.rs +++ /dev/null @@ -1,86 +0,0 @@ -use super::alias_map::AliasMap; -use super::decls::{InjectedAsFn, UpdatedFn}; -use crate::ast::{Expr, Rule}; -use crate::interner::Tok; -use crate::representations::project::{ItemKind, ProjectMod}; -use crate::representations::tree::ModMember; -use crate::representations::VName; -use crate::utils::Substack; - -fn resolve_rec( - namespace: &[Tok], - alias_map: &AliasMap, -) -> Option { - if let Some(alias) = alias_map.resolve(namespace) { - Some(alias.clone()) - } else if let Some((foot, body)) = namespace.split_last() { - let mut new_beginning = resolve_rec(body, alias_map)?; - new_beginning.push(foot.clone()); - Some(new_beginning) - } else { - None - } -} - -fn resolve( - namespace: &[Tok], - alias_map: &AliasMap, - injected_as: &impl InjectedAsFn, -) -> Option { - injected_as(namespace).or_else(|| { - let next_v = resolve_rec(namespace, alias_map)?; - Some(injected_as(&next_v).unwrap_or(next_v)) - }) -} - -fn process_expr( - expr: &Expr, - alias_map: &AliasMap, - injected_as: &impl InjectedAsFn, -) -> Expr { - expr - .map_names(&|n| resolve(n, alias_map, injected_as)) - .unwrap_or_else(|| expr.clone()) -} - -/// Replace all aliases with the name they're originally defined as -fn apply_aliases_rec( - path: Substack>, - module: &mut ProjectMod, - alias_map: &AliasMap, - injected_as: &impl InjectedAsFn, - updated: &impl UpdatedFn, -) { - for (name, entry) in module.entries.iter_mut() { - match &mut entry.member { - ModMember::Sub(sub) => { - let subpath = path.push(name.clone()); - apply_aliases_rec(subpath, sub, alias_map, injected_as, updated) - }, - ModMember::Item(it) => match &mut it.kind { - ItemKind::None => (), - ItemKind::Const(expr) => - *expr = process_expr(expr, alias_map, injected_as), - ItemKind::Alias(name) => - if let Some(alt) = alias_map.resolve(&name) { - *name = alt.clone() - }, - }, - _ => (), - } - } - for Rule { pattern, prio, template } in module.extra.rules.iter_mut() { - for expr in pattern.iter_mut().chain(template.iter_mut()) { - *expr = process_expr(expr, alias_map, injected_as) - } - } -} - -pub fn apply_aliases( - module: &mut ProjectMod, - alias_map: &AliasMap, - injected_as: &impl InjectedAsFn, - updated: &impl UpdatedFn, -) { - apply_aliases_rec(Substack::Bottom, module, alias_map, injected_as, updated) -} diff --git a/src/pipeline/import_resolution/collect_aliases.rs b/src/pipeline/import_resolution/collect_aliases.rs deleted file mode 100644 index b63fcc3..0000000 --- a/src/pipeline/import_resolution/collect_aliases.rs +++ /dev/null @@ -1,47 +0,0 @@ -use super::alias_map::AliasMap; -use super::decls::UpdatedFn; -use crate::error::ProjectResult; -use crate::interner::Tok; -use crate::representations::project::{ProjectMod, ProjectTree}; -use crate::representations::tree::ModMember; -use crate::representations::VName; -use crate::utils::{pushed, unwrap_or}; - -/// Populate target and alias maps from the module tree recursively -fn collect_aliases_rec( - path: Vec>, - module: &ProjectMod, - project: &ProjectTree, - alias_map: &mut AliasMap, - updated: &impl UpdatedFn, -) -> ProjectResult<()> { - // 
Assume injected module has been alias-resolved - if !updated(&path) { - return Ok(()); - }; - for (name, target_sym_v) in module.extra.imports_from.iter() { - let sym_path_v = pushed(&path, name.clone()); - alias_map.link(sym_path_v, target_sym_v.clone()); - } - for (name, entry) in module.entries.iter() { - let submodule = unwrap_or!(&entry.member => ModMember::Sub; continue); - collect_aliases_rec( - pushed(&path, name.clone()), - submodule, - project, - alias_map, - updated, - )? - } - Ok(()) -} - -/// Populate target and alias maps from the module tree -pub fn collect_aliases( - module: &ProjectMod, - project: &ProjectTree, - alias_map: &mut AliasMap, - updated: &impl UpdatedFn, -) -> ProjectResult<()> { - collect_aliases_rec(Vec::new(), module, project, alias_map, updated) -} diff --git a/src/pipeline/import_resolution/decls.rs b/src/pipeline/import_resolution/decls.rs deleted file mode 100644 index b52f206..0000000 --- a/src/pipeline/import_resolution/decls.rs +++ /dev/null @@ -1,8 +0,0 @@ -use trait_set::trait_set; - -use crate::{interner::Tok, VName}; - -trait_set! { - pub trait InjectedAsFn = Fn(&[Tok]) -> Option; - pub trait UpdatedFn = Fn(&[Tok]) -> bool; -} diff --git a/src/pipeline/import_resolution/mod.rs b/src/pipeline/import_resolution/mod.rs deleted file mode 100644 index 1904825..0000000 --- a/src/pipeline/import_resolution/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -mod alias_map; -mod apply_aliases; -mod collect_aliases; -mod decls; -mod resolve_imports; -mod alias_cache; - -pub use resolve_imports::resolve_imports; diff --git a/src/pipeline/import_resolution/resolve_imports.rs b/src/pipeline/import_resolution/resolve_imports.rs deleted file mode 100644 index ec09559..0000000 --- a/src/pipeline/import_resolution/resolve_imports.rs +++ /dev/null @@ -1,22 +0,0 @@ -use super::alias_cache::AliasCache; -use super::alias_map::AliasMap; -use super::apply_aliases::apply_aliases; -use super::collect_aliases::collect_aliases; -use super::decls::{InjectedAsFn, UpdatedFn}; -use crate::error::ProjectResult; -use crate::representations::project::ProjectTree; -use crate::representations::VName; - -/// Follow import chains to locate the original name of all tokens, then -/// replace these aliases with the original names throughout the tree -pub fn resolve_imports( - mut project: ProjectTree, - injected_as: &impl InjectedAsFn, - updated: &impl UpdatedFn, -) -> ProjectResult> { - let mut cache = AliasCache::new(&project); - // let mut map = AliasMap::new(); - // collect_aliases(&project.0, &project, &mut map, updated)?; - // apply_aliases(&mut project.0, &map, injected_as, updated); - Ok(project) -} diff --git a/src/pipeline/mod.rs b/src/pipeline/mod.rs index 60a5816..d74c6c4 100644 --- a/src/pipeline/mod.rs +++ b/src/pipeline/mod.rs @@ -1,7 +1,6 @@ //! 
Loading Orchid modules from source pub mod file_loader; mod import_abs_path; -// mod import_resolution; mod dealias; mod parse_layer; mod project_tree; diff --git a/src/pipeline/project_tree/build_tree.rs b/src/pipeline/project_tree/build_tree.rs index 516f7ec..96360fb 100644 --- a/src/pipeline/project_tree/build_tree.rs +++ b/src/pipeline/project_tree/build_tree.rs @@ -16,6 +16,7 @@ use crate::utils::get_or::get_or_default; use crate::utils::pure_push::pushed_ref; use crate::{Tok, VName}; +#[must_use = "A submodule may not be integrated into the tree"] pub struct TreeReport { pub entries: HashMap, ProjectEntry>, pub rules: Vec>, diff --git a/src/representations/ast.rs b/src/representations/ast.rs index f54a1bc..5d3a539 100644 --- a/src/representations/ast.rs +++ b/src/representations/ast.rs @@ -32,6 +32,7 @@ pub struct Expr { impl Expr { /// Process all names with the given mapper. /// Return a new object if anything was processed + #[must_use] pub fn map_names(&self, pred: &impl Fn(&N) -> Option) -> Option { Some(Self { value: self.value.map_names(pred)?, @@ -40,6 +41,7 @@ impl Expr { } /// Transform from one name system to another + #[must_use] pub fn transform_names(self, pred: &impl Fn(N) -> O) -> Expr { Expr { value: self.value.transform_names(pred), location: self.location } } @@ -71,6 +73,7 @@ pub fn search_all_slcs( impl Expr { /// Add the specified prefix to every Name + #[must_use] pub fn prefix( &self, prefix: &[Tok], @@ -143,6 +146,7 @@ pub enum Clause { impl Clause { /// Extract the expressions from an auto, lambda or S + #[must_use] pub fn body(&self) -> Option>>> { match self { Self::Lambda(_, body) | Self::S(_, body) => Some(body.clone()), @@ -151,6 +155,7 @@ impl Clause { } /// Convert with identical meaning + #[must_use] pub fn into_expr(self) -> Expr { if let Self::S('(', body) = &self { if body.len() == 1 { @@ -164,6 +169,7 @@ impl Clause { } /// Convert with identical meaning + #[must_use] pub fn from_exprs(exprs: &[Expr]) -> Option { if exprs.is_empty() { None @@ -173,7 +179,9 @@ impl Clause { Some(Self::S('(', Rc::new(exprs.to_vec()))) } } + /// Convert with identical meaning + #[must_use] pub fn from_exprv(exprv: &Rc>>) -> Option> { if exprv.len() < 2 { Self::from_exprs(exprv) @@ -185,6 +193,7 @@ impl Clause { /// Collect all names that appear in this expression. /// NOTICE: this isn't the total set of unbound names, it's mostly useful to /// make weak statements for optimization. + #[must_use] pub fn collect_names(&self) -> HashSet { if let Self::Name(n) = self { return HashSet::from([n.clone()]); @@ -202,6 +211,7 @@ impl Clause { /// Process all names with the given mapper. /// Return a new object if anything was processed + #[must_use] pub fn map_names(&self, pred: &impl Fn(&N) -> Option) -> Option { match self { Clause::P(_) | Clause::Placeh(_) => None, @@ -244,6 +254,7 @@ impl Clause { } /// Transform from one name representation to another + #[must_use] pub fn transform_names( self, pred: &impl Fn(N) -> O, @@ -292,6 +303,7 @@ impl Clause { impl Clause { /// Add the specified prefix to every Name + #[must_use] pub fn prefix( &self, prefix: &[Tok], @@ -348,6 +360,7 @@ pub struct Rule { impl Rule { /// Namespace all tokens in the rule + #[must_use] pub fn prefix( &self, prefix: &[Tok], @@ -364,6 +377,7 @@ impl Rule { /// Return a list of all names that don't contain a namespace separator `::`. 
/// These are exported when the rule is exported + #[must_use] pub fn collect_single_names(&self) -> VName { let mut names = Vec::new(); for e in self.pattern.iter() { diff --git a/src/representations/ast_to_postmacro.rs b/src/representations/ast_to_postmacro.rs index 7fe02cc..99fd2a4 100644 --- a/src/representations/ast_to_postmacro.rs +++ b/src/representations/ast_to_postmacro.rs @@ -26,6 +26,7 @@ pub struct Error { pub kind: ErrorKind, } impl Error { + #[must_use] pub fn new(kind: ErrorKind, location: &Location) -> Self { Self { location: location.clone(), kind } } @@ -64,6 +65,7 @@ struct Context<'a> { } impl<'a> Context<'a> { + #[must_use] fn w_name<'b>(&'b self, name: Sym) -> Context<'b> where 'a: 'b, @@ -71,6 +73,7 @@ impl<'a> Context<'a> { Context { names: self.names.push(name) } } + #[must_use] fn new() -> Context<'static> { Context { names: Substack::Bottom } } } diff --git a/src/representations/const_tree.rs b/src/representations/const_tree.rs index e86d6e1..1f3e61a 100644 --- a/src/representations/const_tree.rs +++ b/src/representations/const_tree.rs @@ -24,6 +24,7 @@ pub enum ConstTree { } impl ConstTree { /// Describe a [Primitive] + #[must_use] pub fn primitive(primitive: Primitive) -> Self { Self::Const(Expr { location: Location::Unknown, @@ -31,18 +32,22 @@ impl ConstTree { }) } /// Describe an [ExternFn] + #[must_use] pub fn xfn(xfn: impl ExternFn + 'static) -> Self { Self::primitive(Primitive::ExternFn(Box::new(xfn))) } /// Describe an [Atomic] + #[must_use] pub fn atom(atom: impl Atomic + 'static) -> Self { Self::primitive(Primitive::Atom(Atom(Box::new(atom)))) } /// Describe a module + #[must_use] pub fn tree(arr: impl IntoIterator, Self)>) -> Self { Self::Tree(arr.into_iter().collect()) } /// Namespace the tree with the list of names + #[must_use] pub fn namespace( pref: impl IntoIterator>, data: Self, @@ -59,6 +64,7 @@ impl ConstTree { /// # Panics /// /// If this is a leaf node aka. constant and not a namespace + #[must_use] pub fn unwrap_tree(self) -> HashMap, Self> { match self { Self::Tree(map) => map, @@ -87,6 +93,7 @@ impl Add for ConstTree { } } +#[must_use] fn from_const_tree_rec( path: Substack>, consts: HashMap, ConstTree>, @@ -119,6 +126,7 @@ fn from_const_tree_rec( /// Convert a map of [ConstTree] into a [ProjectTree] that can be used with the /// layered parsing system +#[must_use] pub fn from_const_tree( consts: HashMap, ConstTree>, file: &[Tok], diff --git a/src/representations/interpreted.rs b/src/representations/interpreted.rs index 283fc58..f4b8fc1 100644 --- a/src/representations/interpreted.rs +++ b/src/representations/interpreted.rs @@ -64,10 +64,12 @@ pub struct ExprInst(pub Rc>); impl ExprInst { /// Wrap an [Expr] in a shared container so that normalizatoin steps are /// applied to all references + #[must_use] pub fn new(expr: Expr) -> Self { Self(Rc::new(RefCell::new(expr))) } /// Take the [Expr] out of this container if it's the last reference to it, or /// clone it out. 
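The ConstTree constructors above (tree, namespace, xfn and friends) are how a system assembles the constants it exposes to Orchid code. A sketch of that assembly, assuming the generic parameters elided by the diff are interned Tok<String> keys; the GetCwd command and the system::fs path are illustrative and reuse the CPS-command shape sketched earlier.

// Illustrative zero-field command payload.
#[derive(Debug, Clone)]
struct GetCwd;
impl InertAtomic for GetCwd {
  fn type_str() -> &'static str { "GetCwd" }
}

// Expose it as system::fs::get_cwd, taking two continuations at the call site.
fn fs_consts(i: &Interner) -> ConstTree {
  ConstTree::namespace(
    [i.i("system"), i.i("fs")],
    ConstTree::tree([(i.i("get_cwd"), const_cps(2, GetCwd))]),
  )
}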
+ #[must_use] pub fn expr_val(self) -> Expr { Rc::try_unwrap(self.0) .map(|c| c.into_inner()) @@ -79,6 +81,7 @@ impl ExprInst { /// # Panics /// /// if the expression is already borrowed in read-write mode + #[must_use] pub fn expr(&self) -> impl Deref + '_ { self.0.as_ref().borrow() } @@ -88,6 +91,7 @@ impl ExprInst { /// # Panics /// /// if the expression is already borrowed + #[must_use] pub fn expr_mut(&self) -> impl DerefMut + '_ { self.0.as_ref().borrow_mut() } @@ -124,6 +128,7 @@ impl ExprInst { /// Call a predicate on the expression, returning whatever the /// predicate returns. This is a convenience function for reaching /// through the RefCell. + #[must_use] pub fn inspect(&self, predicate: impl FnOnce(&Clause) -> T) -> T { predicate(&self.expr().clause) } @@ -178,10 +183,12 @@ impl ExprInst { } /// Get the code location data associated with this expresssion directly + #[must_use] pub fn location(&self) -> Location { self.expr().location.clone() } /// If this expression is an [Atomic], request an object of the given type. /// If it's not an atomic, fail the request automatically. + #[must_use = "your request might not have succeeded"] pub fn request(&self) -> Option { match &self.expr().clause { Clause::P(Primitive::Atom(a)) => request(&*a.0), diff --git a/src/representations/location.rs b/src/representations/location.rs index 285d845..c239b58 100644 --- a/src/representations/location.rs +++ b/src/representations/location.rs @@ -27,6 +27,7 @@ pub enum Location { impl Location { /// Range, if known. If the range is known, the file is always known + #[must_use] pub fn range(&self) -> Option> { if let Self::Range { range, .. } = self { Some(range.clone()) @@ -36,6 +37,7 @@ impl Location { } /// File, if known + #[must_use] pub fn file(&self) -> Option> { if let Self::File(file) | Self::Range { file, .. } = self { Some(file.clone()) @@ -45,6 +47,7 @@ impl Location { } /// Associated source code, if known + #[must_use] pub fn source(&self) -> Option> { if let Self::Range { source, .. } = self { Some(source.clone()) @@ -55,6 +58,7 @@ impl Location { /// If the two locations are ranges in the same file, connect them. /// Otherwise choose the more accurate, preferring lhs if equal. 
+ #[must_use] pub fn to(self, other: Self) -> Self { match self { Location::Unknown => other, @@ -75,6 +79,7 @@ impl Location { /// Choose one of the two locations, preferring better accuracy, or lhs if /// equal + #[must_use] pub fn or(self, alt: Self) -> Self { match (&self, &alt) { (Self::Unknown, _) => alt, @@ -110,6 +115,7 @@ impl Debug for Location { } } +#[must_use] fn pos2lc(s: &str, i: usize) -> (usize, usize) { s.chars().take(i).fold((1, 1), |(line, col), char| { if char == '\n' { (line + 1, 1) } else { (line, col + 1) } diff --git a/src/representations/namelike.rs b/src/representations/namelike.rs index 8d28e28..b833ccd 100644 --- a/src/representations/namelike.rs +++ b/src/representations/namelike.rs @@ -21,6 +21,7 @@ pub type Sym = Tok; /// handled together in datastructures pub trait NameLike: 'static + Clone + Eq + Hash + Debug { /// Fully resolve the name for printing + #[must_use] fn to_strv(&self) -> Vec; } diff --git a/src/representations/postmacro_to_interpreted.rs b/src/representations/postmacro_to_interpreted.rs index 499a4ae..e76bd51 100644 --- a/src/representations/postmacro_to_interpreted.rs +++ b/src/representations/postmacro_to_interpreted.rs @@ -5,6 +5,7 @@ use super::path_set::PathSet; use super::{interpreted, postmacro}; use crate::utils::Side; +#[must_use] fn collect_paths_expr_rec( expr: &postmacro::Expr, depth: usize, diff --git a/src/representations/project.rs b/src/representations/project.rs index eb9d67e..b245c8a 100644 --- a/src/representations/project.rs +++ b/src/representations/project.rs @@ -137,6 +137,7 @@ fn collect_rules_rec( /// Collect the complete list of rules to be used by the rule repository from /// the [ProjectTree] +#[must_use] pub fn collect_rules(project: &ProjectTree) -> Vec> { let mut rules = Vec::new(); collect_rules_rec(&mut rules, &project.0); @@ -164,6 +165,7 @@ fn collect_consts_rec( } /// Extract the symbol table from a [ProjectTree] +#[must_use] pub fn collect_consts( project: &ProjectTree, i: &Interner, @@ -173,6 +175,7 @@ pub fn collect_consts( consts } +#[must_use] fn vname_to_sym_tree_rec( tree: ProjectMod, i: &Interner, @@ -218,6 +221,7 @@ fn vname_to_sym_tree_rec( /// Convert a flexible vname-based tree to a more rigid but faster symbol-based /// tree. The pipeline works with vnames, but the macro executor works with /// symbols. 
+#[must_use] pub fn vname_to_sym_tree( tree: ProjectTree, i: &Interner, diff --git a/src/representations/sourcefile.rs b/src/representations/sourcefile.rs index 49563c1..33954b6 100644 --- a/src/representations/sourcefile.rs +++ b/src/representations/sourcefile.rs @@ -34,6 +34,7 @@ impl Import { /// /// Returns the path if this is a glob import, or the path plus the /// name if this is a specific import + #[must_use] pub fn nonglob_path(&self) -> VName { let mut path_vec = self.path.clone(); if let Some(n) = &self.name { @@ -241,6 +242,7 @@ pub fn absolute_path( }) } +#[must_use = "this could be None which means that there are too many supers"] fn absolute_path_rec( abs_location: &[Tok], rel_path: &[Tok], diff --git a/src/representations/string.rs b/src/representations/string.rs index d723c91..77c0282 100644 --- a/src/representations/string.rs +++ b/src/representations/string.rs @@ -3,7 +3,8 @@ use std::hash::Hash; use std::ops::Deref; use std::rc::Rc; -use crate::Tok; +use crate::interpreted::{Clause, ExprInst}; +use crate::{Literal, Primitive, Tok}; /// An Orchid string which may or may not be interned #[derive(Clone, Eq)] @@ -25,6 +26,7 @@ impl Debug for OrcString { impl OrcString { /// Clone out the plain Rust [String] + #[must_use] pub fn get_string(self) -> String { match self { Self::Interned(s) => s.as_str().to_owned(), @@ -32,6 +34,14 @@ impl OrcString { Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone()), } } + + /// Wrap in a [Clause] for returning from extern functions + pub fn cls(self) -> Clause { + Clause::P(Primitive::Literal(Literal::Str(self))) + } + + /// Wrap in an [ExprInst] for embedding in runtime-generated code + pub fn exi(self) -> ExprInst { self.cls().wrap() } } impl Deref for OrcString { diff --git a/src/representations/tree.rs b/src/representations/tree.rs index 72ca2dd..9eef4d4 100644 --- a/src/representations/tree.rs +++ b/src/representations/tree.rs @@ -33,6 +33,7 @@ pub struct ModEntry { } impl ModEntry { /// Returns the item in this entry if it contains one. + #[must_use] pub fn item(&self) -> Option<&TItem> { match &self.member { ModMember::Item(it) => Some(it), @@ -56,6 +57,7 @@ pub type ModPath<'a> = Substack<'a, Tok>; impl Module { /// If the argument is false, returns all child names. /// If the argument is true, returns all public child names. 
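The new OrcString::cls and OrcString::exi helpers above exist so that native code can hand strings back to the interpreter without spelling out the Clause::P(Primitive::Literal(..)) wrapping by hand. A minimal sketch; the function name is illustrative.

// An extern function that ends up holding an OrcString can now return it
// directly as a runtime expression.
fn reply(s: OrcString) -> ExprInst { s.exi() }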
+ #[must_use] pub fn keys(&self, public: bool) -> BoxedIter> { match public { false => Box::new(self.entries.keys().cloned()), @@ -96,7 +98,6 @@ impl Module { /// # Panics /// /// if path is empty, since the reference cannot be forwarded that way - #[allow(clippy::needless_arbitrary_self_type)] // duplicate pub fn walk1_ref<'a: 'b, 'b>( &'a self, prefix: &'b [Tok], @@ -223,9 +224,11 @@ pub struct WalkError<'a> { } impl<'a> WalkError<'a> { /// Total length of the path represented by this error + #[must_use] pub fn depth(&self) -> usize { self.prefix.len() + self.pos + 1 } /// Attach a location to the error and convert into trait object for reporting + #[must_use] pub fn at(self, location: &Location) -> Rc { // panic!("hello"); WalkErrorWithLocation { diff --git a/src/representations/typed.rs b/src/representations/typed.rs deleted file mode 100644 index 7a338a1..0000000 --- a/src/representations/typed.rs +++ /dev/null @@ -1,201 +0,0 @@ -use std::fmt::{Debug, Write}; -use std::rc::Rc; - -use mappable_rc::Mrc; - -use super::get_name::get_name; -use super::primitive::Primitive; -use super::{ast, ast_to_postmacro, get_name, Literal}; -use crate::executor::apply_lambda; -use crate::foreign::{Atom, ExternFn}; -use crate::utils::{one_mrc_slice, string_from_charset, to_mrc_slice}; - -/// Indicates whether either side needs to be wrapped. Syntax whose end is -/// ambiguous on that side must use parentheses, or forward the flag -#[derive(PartialEq, Eq, Clone, Copy)] -struct Wrap(bool, bool); - -#[derive(PartialEq, Eq, Hash, Clone)] -pub struct Expr(pub Clause, pub Vec); -impl Expr { - fn deep_fmt( - &self, - f: &mut std::fmt::Formatter<'_>, - depth: usize, - tr: Wrap, - ) -> std::fmt::Result { - let Expr(val, typ) = self; - if typ.len() > 0 { - val.deep_fmt(f, depth, Wrap(true, true))?; - for typterm in typ { - f.write_char(':')?; - typterm.deep_fmt(f, depth, Wrap(true, true))?; - } - } else { - val.deep_fmt(f, depth, tr)?; - } - Ok(()) - } -} - -impl Debug for Expr { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.deep_fmt(f, 0, Wrap(false, false)) - } -} - -#[derive(PartialEq, Eq, Hash)] -pub enum Clause { - P(Primitive), - Apply(Rc, Rc), - Lambda(Rc<[Clause]>, Rc), - Auto(Rc<[Clause]>, Rc), - LambdaArg(usize), - AutoArg(usize), -} - -const ARGNAME_CHARSET: &str = "abcdefghijklmnopqrstuvwxyz"; - -fn parametric_fmt( - f: &mut std::fmt::Formatter<'_>, - depth: usize, - prefix: &str, - argtyp: &[Clause], - body: &Expr, - wrap_right: bool, -) -> std::fmt::Result { - if wrap_right { - f.write_char('(')?; - } - f.write_str(prefix)?; - f.write_str(&string_from_charset(depth as u64, ARGNAME_CHARSET))?; - for typ in argtyp.iter() { - f.write_str(":")?; - typ.deep_fmt(f, depth, Wrap(false, false))?; - } - f.write_str(".")?; - body.deep_fmt(f, depth + 1, Wrap(false, false))?; - if wrap_right { - f.write_char(')')?; - } - Ok(()) -} - -impl Clause { - fn deep_fmt( - &self, - f: &mut std::fmt::Formatter<'_>, - depth: usize, - Wrap(wl, wr): Wrap, - ) -> std::fmt::Result { - match self { - Self::P(p) => write!(f, "{p:?}"), - Self::Lambda(argtyp, body) => - parametric_fmt(f, depth, "\\", argtyp, body, wr), - Self::Auto(argtyp, body) => - parametric_fmt(f, depth, "@", argtyp, body, wr), - Self::LambdaArg(skip) | Self::AutoArg(skip) => { - let lambda_depth = (depth - skip - 1).try_into().unwrap(); - f.write_str(&string_from_charset(lambda_depth, ARGNAME_CHARSET)) - }, - Self::Apply(func, x) => { - if wl { - f.write_char('(')?; - } - func.deep_fmt(f, depth, Wrap(false, true))?; - 
f.write_char(' ')?; - x.deep_fmt(f, depth, Wrap(true, wr && !wl))?; - if wl { - f.write_char(')')?; - } - Ok(()) - }, - } - } - pub fn wrap(self) -> Box { - Box::new(Expr(self, vec![])) - } - pub fn wrap_t(self, t: Clause) -> Box { - Box::new(Expr(self, vec![t])) - } -} - -impl Clone for Clause { - fn clone(&self) -> Self { - match self { - Clause::Auto(t, b) => { - let new_id = get_name(); - let new_body = - apply_lambda(*uid, Clause::AutoArg(new_id).wrap(), b.clone()); - Clause::Auto(new_id, t.clone(), new_body) - }, - Clause::Lambda(uid, t, b) => { - let new_id = get_name(); - let new_body = - apply_lambda(*uid, Clause::LambdaArg(new_id).wrap(), b.clone()); - Clause::Lambda(new_id, t.clone(), new_body) - }, - Clause::Literal(l) => Clause::Literal(l.clone()), - Clause::ExternFn(nc) => Clause::ExternFn(nc.clone()), - Clause::Atom(a) => Clause::Atom(a.clone()), - Clause::Apply(f, x) => Clause::Apply(Box::clone(&f), x.clone()), - Clause::LambdaArg(id) => Clause::LambdaArg(*id), - Clause::AutoArg(id) => Clause::AutoArg(*id), - } - } -} - -impl Debug for Clause { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.deep_fmt(f, Wrap(false, false)) - } -} - -impl TryFrom<&ast::Expr> for Expr { - type Error = ast_to_postmacro::Error; - fn try_from(value: &ast::Expr) -> Result { - ast_to_postmacro::expr(value) - } -} - -impl TryFrom<&ast::Clause> for Clause { - type Error = ast_to_postmacro::Error; - fn try_from(value: &ast::Clause) -> Result { - ast_to_postmacro::clause(value) - } -} - -pub fn is_used_clause(id: u64, is_auto: bool, clause: &Clause) -> bool { - match clause { - Clause::Atom(_) | Clause::ExternFn(_) | Clause::Literal(_) => false, - Clause::AutoArg(x) => is_auto && *x == id, - Clause::LambdaArg(x) => !is_auto && *x == id, - Clause::Apply(f, x) => - is_used_expr(id, is_auto, &f) || is_used_expr(id, is_auto, &x), - Clause::Auto(n, t, b) => { - assert!(*n != id, "Shadowing should have been eliminated"); - if is_auto && t.iter().any(|c| is_used_clause(id, is_auto, c)) { - return true; - }; - is_used_expr(id, is_auto, b) - }, - Clause::Lambda(n, t, b) => { - assert!(*n != id, "Shadowing should have been eliminated"); - if is_auto && t.iter().any(|c| is_used_clause(id, is_auto, c)) { - return true; - }; - is_used_expr(id, is_auto, b) - }, - } -} - -pub fn is_used_expr( - id: u64, - is_auto: bool, - Expr(val, typ): &Expr, -) -> bool { - if is_auto && typ.iter().any(|c| is_used_clause(id, is_auto, c)) { - return true; - }; - is_used_clause(id, is_auto, val) -} diff --git a/src/rule/matcher.rs b/src/rule/matcher.rs index c57ce3d..d0bfe9a 100644 --- a/src/rule/matcher.rs +++ b/src/rule/matcher.rs @@ -10,7 +10,9 @@ pub type RuleExpr = Expr; /// injected to allow experimentation in the matcher implementation. 
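///
/// A sketch of the expected call pattern (the concrete matcher type is an
/// assumption; any `impl Matcher` is used the same way):
///
///     let matcher = M::new(Rc::new(pattern));
///     if let Some(state) = matcher.apply(&source) {
///         // the rule matched; `state` maps placeholder names to captures
///     }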
pub trait Matcher { /// Build matcher for a pattern + #[must_use] fn new(pattern: Rc>) -> Self; /// Apply matcher to a token sequence + #[must_use] fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option>; } diff --git a/src/rule/matcher_vectree/any_match.rs b/src/rule/matcher_vectree/any_match.rs index cfb86b2..40619ba 100644 --- a/src/rule/matcher_vectree/any_match.rs +++ b/src/rule/matcher_vectree/any_match.rs @@ -4,6 +4,7 @@ use super::vec_match::vec_match; use crate::rule::matcher::RuleExpr; use crate::rule::state::State; +#[must_use] pub fn any_match<'a>( matcher: &AnyMatcher, seq: &'a [RuleExpr], diff --git a/src/rule/matcher_vectree/build.rs b/src/rule/matcher_vectree/build.rs index 27df048..8ab8bed 100644 --- a/src/rule/matcher_vectree/build.rs +++ b/src/rule/matcher_vectree/build.rs @@ -12,6 +12,7 @@ pub type MaxVecSplit<'a> = /// Derive the details of the central vectorial and the two sides from a /// slice of Expr's +#[must_use] fn split_at_max_vec(pattern: &[RuleExpr]) -> Option { let rngidx = pattern.iter().position_max_by_key(|expr| { vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1) @@ -23,10 +24,12 @@ fn split_at_max_vec(pattern: &[RuleExpr]) -> Option { vec_attrs(placeh).map(|attrs| (left, attrs, right)) } +#[must_use] fn scal_cnt<'a>(iter: impl Iterator) -> usize { iter.take_while(|expr| vec_attrs(expr).is_none()).count() } +#[must_use] pub fn mk_any(pattern: &[RuleExpr]) -> AnyMatcher { let left_split = scal_cnt(pattern.iter()); if pattern.len() <= left_split { @@ -43,11 +46,13 @@ pub fn mk_any(pattern: &[RuleExpr]) -> AnyMatcher { } /// Pattern MUST NOT contain vectorial placeholders +#[must_use] fn mk_scalv(pattern: &[RuleExpr]) -> Vec { pattern.iter().map(mk_scalar).collect() } /// Pattern MUST start and end with a vectorial placeholder +#[must_use] fn mk_vec(pattern: &[RuleExpr]) -> VecMatcher { debug_assert!(!pattern.is_empty(), "pattern cannot be empty"); debug_assert!( @@ -99,6 +104,7 @@ fn mk_vec(pattern: &[RuleExpr]) -> VecMatcher { } /// Pattern MUST NOT be a vectorial placeholder +#[must_use] fn mk_scalar(pattern: &RuleExpr) -> ScalMatcher { match &pattern.value { Clause::P(p) => ScalMatcher::P(p.clone()), diff --git a/src/rule/matcher_vectree/scal_match.rs b/src/rule/matcher_vectree/scal_match.rs index f3ba8cc..1e3dc65 100644 --- a/src/rule/matcher_vectree/scal_match.rs +++ b/src/rule/matcher_vectree/scal_match.rs @@ -4,6 +4,7 @@ use crate::ast::Clause; use crate::rule::matcher::RuleExpr; use crate::rule::state::{State, StateEntry}; +#[must_use] pub fn scal_match<'a>( matcher: &ScalMatcher, expr: &'a RuleExpr, @@ -24,6 +25,7 @@ pub fn scal_match<'a>( } } +#[must_use] pub fn scalv_match<'a>( matchers: &[ScalMatcher], seq: &'a [RuleExpr], diff --git a/src/rule/matcher_vectree/vec_match.rs b/src/rule/matcher_vectree/vec_match.rs index 1697da6..0d113ca 100644 --- a/src/rule/matcher_vectree/vec_match.rs +++ b/src/rule/matcher_vectree/vec_match.rs @@ -8,6 +8,7 @@ use crate::rule::matcher::RuleExpr; use crate::rule::state::{State, StateEntry}; use crate::utils::unwrap_or; +#[must_use] pub fn vec_match<'a>( matcher: &VecMatcher, seq: &'a [RuleExpr], diff --git a/src/rule/prepare_rule.rs b/src/rule/prepare_rule.rs index 7948430..5c93aeb 100644 --- a/src/rule/prepare_rule.rs +++ b/src/rule/prepare_rule.rs @@ -11,6 +11,7 @@ use crate::Sym; /// Ensure that the rule's source begins and ends with a vectorial without /// changing its meaning +#[must_use] fn pad(mut rule: Rule, i: &Interner) -> Rule { let class: PHClass = PHClass::Vec { nonzero: false, prio: 0 }; let 
empty: &[Expr] = &[]; diff --git a/src/rule/repository.rs b/src/rule/repository.rs index e2f5e9c..609dc89 100644 --- a/src/rule/repository.rs +++ b/src/rule/repository.rs @@ -67,6 +67,7 @@ impl Repository { } /// Attempt to run each rule in priority order once + #[must_use] pub fn step(&self, code: &RuleExpr) -> Option { let glossary = code.value.collect_names(); for (rule, deps, _) in self.cache.iter() { @@ -87,7 +88,7 @@ impl Repository { /// Keep running the matching rule with the highest priority until no /// rules match. WARNING: this function might not terminate - #[allow(unused)] + #[must_use] pub fn pass(&self, code: &RuleExpr) -> Option { if let Some(mut processed) = self.step(code) { while let Some(out) = self.step(&processed) { @@ -101,7 +102,7 @@ impl Repository { /// Attempt to run each rule in priority order `limit` times. Returns /// the final tree and the number of iterations left to the limit. - #[allow(unused)] + #[must_use] pub fn long_step( &self, code: &RuleExpr, @@ -138,6 +139,7 @@ impl Debug for Repository { } } +#[must_use] fn fmt_hex(num: f64) -> String { let exponent = (num.log2() / 4_f64).floor(); let mantissa = num / 16_f64.powf(exponent); diff --git a/src/rule/rule_error.rs b/src/rule/rule_error.rs index 66ff0c9..8adc06a 100644 --- a/src/rule/rule_error.rs +++ b/src/rule/rule_error.rs @@ -23,6 +23,7 @@ pub enum RuleError { } impl RuleError { /// Convert into a unified error trait object shared by all Orchid errors + #[must_use] pub fn to_project_error(self, rule: &Rule) -> Rc { match self { RuleError::Missing(name) => Missing::new(rule, name).rc(), @@ -58,6 +59,7 @@ pub struct Missing { name: Tok, } impl Missing { + #[must_use] pub fn new(rule: &ast::Rule, name: Tok) -> Self { let mut locations = HashSet::new(); for expr in rule.template.iter() { @@ -99,6 +101,7 @@ pub struct Multiple { name: Tok, } impl Multiple { + #[must_use] pub fn new(rule: &ast::Rule, name: Tok) -> Self { let mut locations = HashSet::new(); for expr in rule.template.iter() { @@ -137,6 +140,7 @@ pub struct ArityMismatch { name: Tok, } impl ArityMismatch { + #[must_use] pub fn new(rule: &ast::Rule, name: Tok) -> Self { let mut locations = HashSet::new(); for expr in rule.template.iter() { @@ -188,6 +192,7 @@ pub struct VecNeighbors { n2: Tok, } impl VecNeighbors { + #[must_use] pub fn new(rule: &ast::Rule, n1: Tok, n2: Tok) -> Self { let mut locations = HashSet::new(); search_all_slcs(&rule.template[..], &mut |ev| { diff --git a/src/rule/state.rs b/src/rule/state.rs index a567d12..0c02cfc 100644 --- a/src/rule/state.rs +++ b/src/rule/state.rs @@ -14,6 +14,7 @@ pub enum StateEntry<'a> { } pub type State<'a> = HashMap, StateEntry<'a>>; +#[must_use] pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec { template .iter() @@ -22,6 +23,7 @@ pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec { .collect() } +#[must_use] pub fn apply_expr(template: &RuleExpr, state: &State) -> Vec { let Expr { location, value } = template; match value { diff --git a/src/rule/update_first_seq.rs b/src/rule/update_first_seq.rs index 8988804..16aebd1 100644 --- a/src/rule/update_first_seq.rs +++ b/src/rule/update_first_seq.rs @@ -8,6 +8,7 @@ use crate::Sym; /// Traverse the tree, calling pred on every sibling list until it returns /// some vec then replace the sibling list with that vec and return true /// return false if pred never returned some +#[must_use] pub fn exprv>) -> Option>>>( input: Rc>, pred: &mut F, @@ -19,6 +20,7 @@ pub fn exprv>) -> Option>>>( .map(|i| Rc::new(i.collect())) } 
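// Note (sketch): like `expr` and `clause` below, `exprv` returns Some with the
// rewritten sibling list if `pred` fired anywhere in the subtree and None if
// it never matched, so callers can stop after the first successful rewrite.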
+#[must_use] pub fn expr>) -> Option>>>( input: &RuleExpr, pred: &mut F, @@ -27,6 +29,7 @@ pub fn expr>) -> Option>>>( .map(|value| Expr { value, location: input.location.clone() }) } +#[must_use] pub fn clause>) -> Option>>>( c: &Clause, pred: &mut F, diff --git a/src/rule/vec_attrs.rs b/src/rule/vec_attrs.rs index 86798b7..1425449 100644 --- a/src/rule/vec_attrs.rs +++ b/src/rule/vec_attrs.rs @@ -4,6 +4,7 @@ use crate::interner::Tok; /// Returns the name, priority and nonzero of the expression if it is /// a vectorial placeholder +#[must_use] pub fn vec_attrs(expr: &RuleExpr) -> Option<(Tok, u64, bool)> { match expr.value.clone() { Clause::Placeh(Placeholder { diff --git a/src/systems/asynch/async.orc b/src/systems/asynch/async.orc new file mode 100644 index 0000000..737b59a --- /dev/null +++ b/src/systems/asynch/async.orc @@ -0,0 +1,5 @@ +export const block_on := \action.\cont. ( + action cont + (\e.panic "unwrapped asynch call") + \c.yield +) diff --git a/src/systems/asynch/system.rs b/src/systems/asynch/system.rs index 06fe66f..b27978c 100644 --- a/src/systems/asynch/system.rs +++ b/src/systems/asynch/system.rs @@ -8,12 +8,14 @@ use std::time::Duration; use hashbrown::HashMap; use ordered_float::NotNan; +use rust_embed::RustEmbed; use crate::facade::{IntoSystem, System}; use crate::foreign::cps_box::{init_cps, CPSBox}; use crate::foreign::{Atomic, ExternError, InertAtomic}; use crate::interpreted::ExprInst; use crate::interpreter::HandlerTable; +use crate::pipeline::file_loader::embed_to_map; use crate::systems::codegen::call; use crate::systems::stl::Boolean; use crate::utils::poller::{PollEvent, Poller}; @@ -68,6 +70,12 @@ impl MessagePort { } } +#[derive(RustEmbed)] +#[folder = "src/systems/asynch"] +#[prefix = "system/"] +#[include = "*.orc"] +struct AsynchEmbed; + type AnyHandler<'a> = Box) -> Vec + 'a>; /// Datastructures the asynch system will eventually be constructed from. @@ -80,6 +88,7 @@ pub struct AsynchSystem<'a> { impl<'a> AsynchSystem<'a> { /// Create a new async event loop that allows registering handlers and taking /// references to the port before it's converted into a [System] + #[must_use] pub fn new() -> Self { let (sender, poller) = Poller::new(); Self { poller, sender, handlers: HashMap::new() } @@ -108,6 +117,7 @@ impl<'a> AsynchSystem<'a> { /// Obtain a message port for sending messages to the main thread. If an /// object is passed to the MessagePort that does not have a handler, the /// main thread panics. + #[must_use] pub fn get_port(&self) -> MessagePort { MessagePort(self.sender.clone()) } } @@ -181,7 +191,7 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> { ]), ) .unwrap_tree(), - code: HashMap::new(), + code: embed_to_map::(".orc", i), prelude: Vec::new(), handlers: handler_table, } diff --git a/src/systems/codegen.rs b/src/systems/codegen.rs index 87bdc0d..c5575f1 100644 --- a/src/systems/codegen.rs +++ b/src/systems/codegen.rs @@ -38,14 +38,24 @@ fn none() -> Clause { /// Define a clause that can be called with a callback and passes the provided /// values to the callback in order. 
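///
/// For two elements the generated clause is effectively `\f. f x0 x1`: one
/// lambda whose argument gets applied to every element in order (the
/// `tuple_printer` test below prints this shape for a binary tuple).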
-pub fn tuple(data: Vec) -> Clause { - Clause::Lambda { - args: Some(PathSet { - next: None, - steps: Rc::new(data.iter().map(|_| Side::Left).collect()), - }), - body: (data.into_iter()) - .fold(Clause::LambdaArg.wrap(), |f, x| Clause::Apply { f, x }.wrap()), +pub fn tuple(data: impl IntoIterator) -> Clause { + let mut steps = Vec::new(); + let mut body = Clause::LambdaArg.wrap(); + for x in data.into_iter() { + steps.push(Side::Left); + body = Clause::Apply { f: body, x }.wrap() + } + let path_set = PathSet { next: None, steps: Rc::new(steps) }; + Clause::Lambda { args: Some(path_set), body } +} + +#[cfg(test)] +mod test { + use crate::systems::codegen::tuple; + + #[test] + fn tuple_printer() { + println!("Binary tuple: {}", tuple([0.into(), 1.into()])) } } @@ -55,3 +65,9 @@ pub fn call(f: ExprInst, args: impl IntoIterator) -> Clause { let x = unwrap_or!(it.by_ref().next(); return f.inspect(Clause::clone)); it.fold(Clause::Apply { f, x }, |acc, x| Clause::Apply { f: acc.wrap(), x }) } + +/// Build an Orchid list from a Rust iterator +pub fn list(items: impl IntoIterator) -> Clause { + let mut iter = items.into_iter(); + orchid_opt(iter.next().map(|it| tuple([it, list(iter).wrap()]).wrap())) +} diff --git a/src/systems/directfs/commands.rs b/src/systems/directfs/commands.rs index 36aef94..669baa3 100644 --- a/src/systems/directfs/commands.rs +++ b/src/systems/directfs/commands.rs @@ -1,19 +1,203 @@ -use crate::foreign::cps_box::init_cps; -use crate::foreign::InertAtomic; -use crate::systems::asynch::MessagePort; -use crate::systems::scheduler::SeqScheduler; -use crate::{define_fn, OrcString}; +use std::ffi::OsString; +use std::fs::File; +use std::io::{BufReader, Read, Write}; +use std::path::Path; + +use hashbrown::HashMap; +use itertools::Itertools; + +use crate::facade::{IntoSystem, System}; +use crate::foreign::cps_box::{init_cps, CPSBox}; +use crate::foreign::{Atomic, InertAtomic}; +use crate::interpreted::{Clause, ExprInst}; +use crate::interpreter::HandlerTable; +use crate::systems::codegen::{call, list, orchid_opt, tuple}; +use crate::systems::io::wrap_io_error; +use crate::systems::scheduler::{SeqScheduler, SharedHandle}; +use crate::systems::stl::Boolean; +use crate::systems::RuntimeError; +use crate::utils::unwrap_or; +use crate::{define_fn, ConstTree, OrcString}; #[derive(Debug, Clone)] -struct ReadFile(OrcString); -impl InertAtomic for ReadFile { - fn type_str() -> &'static str { "a readfile command" } +pub struct ReadFileCmd(OrcString); +impl InertAtomic for ReadFileCmd { + fn type_str() -> &'static str { "readfile command" } } -pub fn read_file(port: MessagePort, cmd: ReadFile) -> Vec { - let new_file = +#[derive(Debug, Clone)] +pub struct ReadDirCmd(OrcString); +impl InertAtomic for ReadDirCmd { + fn type_str() -> &'static str { "readdir command" } +} + +#[derive(Debug, Clone)] +pub struct WriteFile { + name: OrcString, + append: bool, +} +impl InertAtomic for WriteFile { + fn type_str() -> &'static str { "writefile command" } +} + +#[must_use] +fn read_file(sched: &SeqScheduler, cmd: CPSBox) -> ExprInst { + let (ReadFileCmd(name), succ, fail, cont) = cmd.unpack3(); + let name = name.get_string(); + let cancel = sched.run_orphan( + move |_| File::open(name), + |file, _| match file { + Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()], + Ok(f) => { + let source = + SharedHandle::wrap(BufReader::new(Box::new(f) as Box)); + vec![call(succ, [source.atom_exi()]).wrap()] + }, + }, + ); + call(cont, [init_cps(1, cancel).wrap()]).wrap() +} + +#[must_use] +fn read_dir(sched: 
&SeqScheduler, cmd: CPSBox) -> ExprInst { + let (ReadDirCmd(name), succ, fail, cont) = cmd.unpack3(); + let name = name.get_string(); + let cancel = sched.run_orphan( + move |_| { + Path::new(&name) + .read_dir()? + .map(|r| r.and_then(|e| Ok((e.file_name(), e.file_type()?.is_dir())))) + .collect() + }, + |items: std::io::Result>, _| match items { + Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()], + Ok(os_namev) => { + let converted = (os_namev.into_iter()) + .map(|(n, d)| { + Ok(tuple([os_str_cls(n)?.wrap(), Boolean(d).atom_exi()]).wrap()) + }) + .collect::, Clause>>(); + match converted { + Err(e) => vec![call(fail, [e.wrap()]).wrap()], + Ok(names) => vec![call(succ, [list(names).wrap()]).wrap()], + } + }, + }, + ); + call(cont, [init_cps(1, cancel).wrap()]).wrap() +} + +#[must_use] +pub fn write_file(sched: &SeqScheduler, cmd: CPSBox) -> ExprInst { + let (WriteFile { name, append }, succ, fail, cont) = cmd.unpack3(); + let name = name.get_string(); + let cancel = sched.run_orphan( + move |_| File::options().write(true).append(append).open(name), + |file, _| match file { + Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()], + Ok(f) => { + let handle = SharedHandle::wrap(Box::new(f) as Box); + vec![call(succ, [handle.atom_exi()]).wrap()] + }, + }, + ); + call(cont, [init_cps(1, cancel).wrap()]).wrap() +} + +#[derive(Debug, Clone)] +pub struct InvalidString(OsString); +impl InertAtomic for InvalidString { + fn type_str() -> &'static str { "invalidstring error" } +} + +fn os_str_cls(str: OsString) -> Result { + (str.into_string()) + .map_err(|e| InvalidString(e).atom_cls()) + .map(|s| OrcString::from(s).cls()) } define_fn! { - pub OpenFileRead = |x| Ok(init_cps(3, ReadFile(x.downcast()?))) + pub IsInvalidString = |x| { + Ok(Boolean(x.downcast::().is_ok()).atom_cls()) + }; + pub OpenFileRead = |x| Ok(init_cps(3, ReadFileCmd(x.downcast()?))); + pub ReadDir = |x| Ok(init_cps(3, ReadDirCmd(x.downcast()?))); + pub OpenFileWrite = |x| { + Ok(init_cps(3, WriteFile{ name: x.downcast()?, append: false })) + }; + pub OpenFileAppend = |x| { + Ok(init_cps(3, WriteFile{ name: x.downcast()?, append: true })) + }; + + pub JoinPaths { root: OrcString, sub: OrcString } => { + let res = Path::new(root.as_str()) + .join(sub.as_str()) + .into_os_string(); + os_str_cls(res.clone()).map_err(|_| RuntimeError::ext( + format!("result {res:?} contains illegal characters"), + "joining paths" + )) + }; + pub PopPath = |x| { + eprintln!("argument is {x}"); + let arg = x.downcast::()?; + let full_path = Path::new(arg.as_str()); + let parent = unwrap_or! {full_path.parent(); { + return Ok(orchid_opt(None)) + }}; + let sub = unwrap_or! {full_path.file_name(); { + return Ok(orchid_opt(None)) + }}; + Ok(orchid_opt(Some(tuple( + [parent.as_os_str(), sub] + .into_iter() + .map(|s| os_str_cls(s.to_owned()).map_err(|_| RuntimeError::ext( + format!("Result {s:?} contains illegal characters"), + "splitting a path" + ))) + .map_ok(Clause::wrap) + .collect::, _>>()? + ).wrap()))) + } +} + +/// A rudimentary system to read and write files. +#[derive(Clone)] +pub struct DirectFS { + scheduler: SeqScheduler, +} +impl DirectFS { + /// Create a new instance of the system. 
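+  ///
+  /// Sketch of the intended wiring (assumed, not shown in this patch): pass in
+  /// the same [SeqScheduler] that backs the io system, then register the
+  /// resulting system on the interpreter facade alongside io and asynch.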
+ pub fn new(scheduler: SeqScheduler) -> Self { Self { scheduler } } +} + +impl IntoSystem<'static> for DirectFS { + fn into_system(self, i: &crate::Interner) -> System<'static> { + let mut handlers = HandlerTable::new(); + let sched = self.scheduler.clone(); + handlers.register(move |cmd| Ok(read_file(&sched, *cmd))); + let sched = self.scheduler.clone(); + handlers.register(move |cmd| Ok(read_dir(&sched, *cmd))); + let sched = self.scheduler; + handlers.register(move |cmd| Ok(write_file(&sched, *cmd))); + System { + name: ["system", "directfs"].into_iter().map_into().collect(), + code: HashMap::new(), + prelude: Vec::new(), + constants: ConstTree::namespace( + [i.i("system"), i.i("directfs")], + ConstTree::tree([ + (i.i("is_invalid_string"), ConstTree::xfn(IsInvalidString)), + (i.i("readfile"), ConstTree::xfn(OpenFileRead)), + (i.i("readdir"), ConstTree::xfn(ReadDir)), + (i.i("writefile"), ConstTree::xfn(OpenFileWrite)), + (i.i("appendfile"), ConstTree::xfn(OpenFileAppend)), + (i.i("join_paths"), ConstTree::xfn(JoinPaths)), + (i.i("pop_path"), ConstTree::xfn(PopPath)), + ]), + ) + .unwrap_tree(), + handlers, + } + } } diff --git a/src/systems/directfs/mod.rs b/src/systems/directfs/mod.rs index 6de2606..dd33cae 100644 --- a/src/systems/directfs/mod.rs +++ b/src/systems/directfs/mod.rs @@ -1,2 +1,5 @@ - +//! A rudimentary system exposing methods for Orchid to interact with the file +//! system. All paths are strings. mod commands; + +pub use commands::DirectFS; diff --git a/src/systems/io/bindings.rs b/src/systems/io/bindings.rs index 80977e7..ce290a4 100644 --- a/src/systems/io/bindings.rs +++ b/src/systems/io/bindings.rs @@ -1,10 +1,11 @@ use super::flow::IOCmdHandlePack; use super::instances::{ - BRead, ReadCmd, SRead, SinkHandle, SourceHandle, WriteCmd, + BRead, ReadCmd, SRead, WriteCmd, Sink, Source, }; use crate::foreign::cps_box::init_cps; use crate::foreign::{Atom, Atomic}; use crate::representations::OrcString; +use crate::systems::scheduler::SharedHandle; use crate::systems::stl::Binary; use crate::systems::RuntimeError; use crate::{ast, define_fn, ConstTree, Interner, Primitive}; @@ -22,17 +23,13 @@ define_fn! { cmd: ReadCmd::RBytes(BRead::All), handle: x.downcast()? })); - ReadBytes { - stream: SourceHandle, - n: u64 - } => Ok(init_cps(3, IOCmdHandlePack{ - cmd: ReadCmd::RBytes(BRead::N(n.try_into().unwrap())), - handle: stream.clone() - })); - ReadUntil { - stream: SourceHandle, - pattern: u64 - } => { + ReadBytes { stream: SharedHandle, n: u64 } => { + Ok(init_cps(3, IOCmdHandlePack{ + cmd: ReadCmd::RBytes(BRead::N(n.try_into().unwrap())), + handle: stream.clone() + })) + }; + ReadUntil { stream: SharedHandle, pattern: u64 } => { let delim = pattern.try_into().map_err(|_| RuntimeError::ext( "greater than 255".to_string(), "converting number to byte" @@ -42,20 +39,18 @@ define_fn! 
{ handle: stream })) }; - WriteStr { - stream: SinkHandle, - string: OrcString - } => Ok(init_cps(3, IOCmdHandlePack { - cmd: WriteCmd::WStr(string.get_string()), - handle: stream.clone(), - })); - WriteBin { - stream: SinkHandle, - bytes: Binary - } => Ok(init_cps(3, IOCmdHandlePack { - cmd: WriteCmd::WBytes(bytes), - handle: stream.clone(), - })); + WriteStr { stream: SharedHandle, string: OrcString } => { + Ok(init_cps(3, IOCmdHandlePack { + cmd: WriteCmd::WStr(string.get_string()), + handle: stream.clone(), + })) + }; + WriteBin { stream: SharedHandle, bytes: Binary } => { + Ok(init_cps(3, IOCmdHandlePack { + cmd: WriteCmd::WBytes(bytes), + handle: stream.clone(), + })) + }; Flush = |x| Ok(init_cps(3, IOCmdHandlePack { cmd: WriteCmd::Flush, handle: x.downcast()? diff --git a/src/systems/io/instances.rs b/src/systems/io/instances.rs index 38b80df..caed569 100644 --- a/src/systems/io/instances.rs +++ b/src/systems/io/instances.rs @@ -9,12 +9,11 @@ use crate::systems::scheduler::{Canceller, SharedHandle}; use crate::systems::stl::Binary; use crate::Literal; +/// Any type that we can read controlled amounts of data from pub type Source = BufReader>; +/// Any type that we can write data to pub type Sink = Box; -pub type SourceHandle = SharedHandle; -pub type SinkHandle = SharedHandle; - /// String reading command #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum SRead { @@ -39,7 +38,7 @@ pub enum ReadCmd { impl IOCmd for ReadCmd { type Stream = Source; type Result = ReadResult; - type Handle = SourceHandle; + type Handle = SharedHandle; // This is a buggy rule, check manually #[allow(clippy::read_zero_byte_vec)] @@ -82,22 +81,21 @@ impl ReadResult { pub fn dispatch(self, succ: ExprInst, fail: ExprInst) -> Vec { match self { ReadResult::RBin(_, Err(e)) | ReadResult::RStr(_, Err(e)) => { - vec![call(fail, vec![wrap_io_error(e)]).wrap()] + vec![call(fail, [wrap_io_error(e)]).wrap()] }, ReadResult::RBin(_, Ok(bytes)) => { let arg = Binary(Arc::new(bytes)).atom_cls().wrap(); - vec![call(succ, vec![arg]).wrap()] + vec![call(succ, [arg]).wrap()] }, ReadResult::RStr(_, Ok(text)) => { - vec![call(succ, vec![Literal::Str(text.into()).into()]).wrap()] + vec![call(succ, [Literal::Str(text.into()).into()]).wrap()] }, } } } -/// Placeholder function for an eventual conversion from [io::Error] to Orchid -/// data -fn wrap_io_error(_e: io::Error) -> ExprInst { Literal::Uint(0u64).into() } +/// Function to convert [io::Error] to Orchid data +pub fn wrap_io_error(_e: io::Error) -> ExprInst { Literal::Uint(0u64).into() } /// Writing command (string or binary) #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -109,7 +107,7 @@ pub enum WriteCmd { impl IOCmd for WriteCmd { type Stream = Sink; - type Handle = SinkHandle; + type Handle = SharedHandle; type Result = WriteResult; fn execute( diff --git a/src/systems/io/mod.rs b/src/systems/io/mod.rs index 2c0bbd4..744604e 100644 --- a/src/systems/io/mod.rs +++ b/src/systems/io/mod.rs @@ -9,3 +9,4 @@ mod service; // pub use facade::{io_system, IOStream, IOSystem}; pub use service::{Service, Stream, StreamTable}; +pub use instances::{wrap_io_error, Source, Sink}; diff --git a/src/systems/io/service.rs b/src/systems/io/service.rs index b2d397e..5b7e5cb 100644 --- a/src/systems/io/service.rs +++ b/src/systems/io/service.rs @@ -1,6 +1,7 @@ #[allow(unused)] // for doc use std::io::{BufReader, Read, Write}; +use itertools::Itertools; use rust_embed::RustEmbed; use trait_set::trait_set; @@ -69,8 +70,8 @@ impl<'a, ST: IntoIterator> IntoSystem<'static> |stream| 
(stream, Vec::new()), ); match result { - Ok(cancel) => Ok(call(tail, vec![init_cps(1, cancel).wrap()]).wrap()), - Err(e) => Ok(call(fail, vec![e.atom_exi()]).wrap()), + Ok(cancel) => Ok(call(tail, [init_cps(1, cancel).wrap()]).wrap()), + Err(e) => Ok(call(fail, [e.atom_exi()]).wrap()), } }); let scheduler = self.scheduler.clone(); @@ -87,8 +88,8 @@ impl<'a, ST: IntoIterator> IntoSystem<'static> |stream| (stream, Vec::new()), ); match result { - Ok(cancel) => Ok(call(tail, vec![init_cps(1, cancel).wrap()]).wrap()), - Err(e) => Ok(call(fail, vec![e.atom_exi()]).wrap()), + Ok(cancel) => Ok(call(tail, [init_cps(1, cancel).wrap()]).wrap()), + Err(e) => Ok(call(fail, [e.atom_exi()]).wrap()), } }); let streams = self.global_streams.into_iter().map(|(n, stream)| { @@ -101,7 +102,7 @@ impl<'a, ST: IntoIterator> IntoSystem<'static> }); System { handlers, - name: vec!["system".to_string(), "io".to_string()], + name: ["system", "io"].into_iter().map_into().collect(), constants: io_bindings(i, streams).unwrap_tree(), code: embed_to_map::(".orc", i), prelude: vec![FileEntry { diff --git a/src/systems/mod.rs b/src/systems/mod.rs index 9380219..139e75f 100644 --- a/src/systems/mod.rs +++ b/src/systems/mod.rs @@ -3,7 +3,7 @@ mod assertion_error; pub mod asynch; pub mod cast_exprinst; pub mod codegen; -// mod directfs; +pub mod directfs; pub mod io; mod runtime_error; pub mod scheduler; diff --git a/src/systems/scheduler/canceller.rs b/src/systems/scheduler/canceller.rs index 009b611..aad28be 100644 --- a/src/systems/scheduler/canceller.rs +++ b/src/systems/scheduler/canceller.rs @@ -1,14 +1,9 @@ use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; -use crate::foreign::InertAtomic; - /// A single-fire thread-safe boolean flag with relaxed ordering #[derive(Debug, Clone)] pub struct Canceller(Arc); -impl InertAtomic for Canceller { - fn type_str() -> &'static str { "a canceller" } -} impl Canceller { /// Create a new canceller diff --git a/src/systems/scheduler/system.rs b/src/systems/scheduler/system.rs index 7eec7b9..fe3afea 100644 --- a/src/systems/scheduler/system.rs +++ b/src/systems/scheduler/system.rs @@ -223,6 +223,26 @@ impl SeqScheduler { }) } + /// Run an operation asynchronously and then process its result in thread, + /// without queuing on any particular data. + pub fn run_orphan( + &self, + operation: impl FnOnce(Canceller) -> T + Send + 'static, + handler: impl FnOnce(T, Canceller) -> Vec + 'static, + ) -> Canceller { + let cancelled = Canceller::new(); + let canc1 = cancelled.clone(); + let opid = self.0.pending.borrow_mut().insert(Box::new(|data, _| { + handler(*data.downcast().expect("This is associated by ID"), canc1) + })); + let canc1 = cancelled.clone(); + let mut port = self.0.port.clone(); + self.0.pool.submit(Box::new(move || { + port.send(SyncReply { opid, data: Box::new(operation(canc1)) }); + })); + cancelled + } + /// Schedule a function that will consume the value. After this the handle is /// considered sealed and all [SeqScheduler::schedule] calls will fail. pub fn seal( diff --git a/src/systems/stl/bin.rs b/src/systems/stl/bin.rs index 78ee66b..5142d6e 100644 --- a/src/systems/stl/bin.rs +++ b/src/systems/stl/bin.rs @@ -125,7 +125,7 @@ expr=x in )? 
} let (asl, bsl) = bin.0.split_at(i as usize); - Ok(tuple(vec![ + Ok(tuple([ Binary(Arc::new(asl.to_vec())).atom_cls().into(), Binary(Arc::new(bsl.to_vec())).atom_cls().into(), ])) diff --git a/src/systems/stl/list.orc b/src/systems/stl/list.orc index 6afc6b2..a3dfb6c 100644 --- a/src/systems/stl/list.orc +++ b/src/systems/stl/list.orc @@ -1,13 +1,17 @@ -import super::(option, fn::*, proc::*, loop::*, bool::*, known::*, num::*) +import super::(option, fn::*, proc::*, loop::*, bool::*, known::*, num::*, tuple::*) const pair := \a.\b. \f. f a b -- Constructors -export const cons := \hd.\tl. option::some (pair hd tl) +export const cons := \hd.\tl. option::some t[hd, tl] export const end := option::none -export const pop := \list.\default.\f.list default \cons.cons f +export const pop := \list.\default.\f. do{ + cps tuple = list default; + cps head, tail = tuple; + f head tail +} -- Operators @@ -100,6 +104,25 @@ export const get := \list.\n. ( } ) +--[ + Map every element to a pair of the index and the original element +]-- +export const enumerate := \list. ( + recursive r (list, n = 0) + pop list end \head.\tail. + cons t[n, head] $ r tail $ n + 1 +) + +--[ + Turn a list of CPS commands into a sequence. This is achieved by calling every + element on the return value of the next element with the tail passed to it. + The continuation is passed to the very last argument. +]-- +export const chain := \list.\cont. loop_over (list) { + cps head, list = pop list cont; + cps head; +} + macro new[...$item, ...$rest:1] =0x2p84=> (cons (...$item) new[...$rest]) macro new[...$end] =0x1p84=> (cons (...$end) end) macro new[] =0x1p84=> end diff --git a/src/systems/stl/num.orc b/src/systems/stl/num.orc index 58c7bbd..e08df41 100644 --- a/src/systems/stl/num.orc +++ b/src/systems/stl/num.orc @@ -1,7 +1,7 @@ export operators[ + - * % / ] macro ...$a + ...$b =0x2p36=> (add (...$a) (...$b)) -macro ...$a - ...$b:1 =0x2p36=> (subtract (...$a) (...$b)) +macro ...$a:1 - ...$b =0x2p36=> (subtract (...$a) (...$b)) macro ...$a * ...$b =0x1p36=> (multiply (...$a) (...$b)) -macro ...$a % ...$b:1 =0x1p36=> (remainder (...$a) (...$b)) -macro ...$a / ...$b:1 =0x1p36=> (divide (...$a) (...$b)) +macro ...$a:1 % ...$b =0x1p36=> (remainder (...$a) (...$b)) +macro ...$a:1 / ...$b =0x1p36=> (divide (...$a) (...$b)) diff --git a/src/systems/stl/prelude.orc b/src/systems/stl/prelude.orc index 36d04b2..3f3d507 100644 --- a/src/systems/stl/prelude.orc +++ b/src/systems/stl/prelude.orc @@ -6,10 +6,13 @@ import std::bool::* export ::([==], if, then, else, true, false) import std::fn::* export ::([$ |> =>], identity, pass, pass2, return) +import std::tuple::* +export ::(t) +import std::tuple import std::list import std::map import std::option -export ::(list, map, option) +export ::(tuple, list, map, option) import std::loop::* export ::(loop_over, recursive) diff --git a/src/systems/stl/str.rs b/src/systems/stl/str.rs index ec3828b..540be20 100644 --- a/src/systems/stl/str.rs +++ b/src/systems/stl/str.rs @@ -65,7 +65,7 @@ expr=x in let mut graphs = s.as_str().graphemes(true); let a = graphs.by_ref().take(i as usize).collect::(); let b = graphs.collect::(); - Ok(tuple(vec![a.into(), b.into()])) + Ok(tuple([a.into(), b.into()])) } } diff --git a/src/systems/stl/tuple.orc b/src/systems/stl/tuple.orc new file mode 100644 index 0000000..0a01f24 --- /dev/null +++ b/src/systems/stl/tuple.orc @@ -0,0 +1,16 @@ +import super::(known::*, bool::*, num::*) + +const discard_args := \n.\value. ( + if n == 0 then value + else \_. 
discard_args (n - 1) value +) + +export const pick := \tuple. \i.\n. tuple ( + discard_args i \val. discard_args (n - 1 - i) val +) + +macro t[...$item, ...$rest:1] =0x2p84=> (\f. t[...$rest] (f (...$item))) +macro t[...$end] =0x1p84=> (\f. f (...$end)) +macro t[] =0x1p84=> \f.f + +export ::(t)
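+
+--[
+  Usage sketch (illustrative, not part of the committed file):
+  `pick t[a, b, c] 1 3` reduces to `b`: the tuple feeds its elements to the
+  selector built above, which skips `i` leading values, keeps the next one and
+  discards the remaining `n - 1 - i`.
+]--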