Removed a copious amount of premature Rc-s

2023-06-18 04:22:20 +01:00
parent aebbf51228
commit 79e28883db
56 changed files with 716 additions and 636 deletions

Cargo.lock (generated, 35 changed lines)

@@ -212,6 +212,16 @@ dependencies = [
  "crypto-common",
 ]
+[[package]]
+name = "duplicate"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "de78e66ac9061e030587b2a2e75cc88f22304913c907b11307bca737141230cb"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+]
 [[package]]
 name = "dyn-clone"
 version = "1.0.11"
@@ -395,6 +405,7 @@ version = "0.2.2"
 dependencies = [
  "chumsky",
  "clap",
+ "duplicate",
  "dyn-clone",
  "hashbrown 0.13.2",
  "itertools",
@@ -420,6 +431,30 @@ version = "1.0.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79"
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+ "version_check",
+]
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
 [[package]]
 name = "proc-macro2"
 version = "1.0.56"

View File

@@ -32,3 +32,4 @@ clap = { version = "4.3", features = ["derive"] }
 trait-set = "0.3"
 paste = "1.0"
 rust-embed = { version = "6.6", features = ["include-exclude"] }
+duplicate = "1.0.0"

View File

@@ -2,13 +2,15 @@ mod cli;
 use std::fs::File;
 use std::path::{Path, PathBuf};
-use std::process;
+use std::{iter, process};
 use clap::Parser;
 use hashbrown::HashMap;
 use itertools::Itertools;
-use orchidlang::interner::{InternedDisplay, Interner, Sym};
-use orchidlang::{ast, ast_to_interpreted, interpreter, pipeline, rule, stl};
+use orchidlang::interner::{InternedDisplay, Interner};
+use orchidlang::{
+  ast, ast_to_interpreted, interpreter, pipeline, rule, stl, Stok, Sym, VName,
+};
 use crate::cli::cmd_prompt;
@@ -66,12 +68,16 @@ impl Args {
 /// Load and parse all source related to the symbol `target` or all symbols
 /// in the namespace `target` in the context of the STL. All sourcefiles must
 /// reside within `dir`.
-fn load_dir(dir: &Path, target: Sym, i: &Interner) -> pipeline::ProjectTree {
+fn load_dir(
+  dir: &Path,
+  target: &[Stok],
+  i: &Interner,
+) -> pipeline::ProjectTree<VName> {
   let file_cache = pipeline::file_loader::mk_dir_cache(dir.to_path_buf(), i);
   let library = stl::mk_stl(i, stl::StlOptions::default());
   pipeline::parse_layer(
-    &[target],
-    &|path| file_cache.find(&path),
+    iter::once(target),
+    &|path| file_cache.find(path),
     &library,
     &stl::mk_prelude(i),
     i,
@@ -79,12 +85,12 @@ fn load_dir(dir: &Path, target: Sym, i: &Interner) -> pipeline::ProjectTree {
   .expect("Failed to load source code")
 }
-pub fn to_sym(data: &str, i: &Interner) -> Sym {
-  i.i(&data.split("::").map(|s| i.i(s)).collect::<Vec<_>>()[..])
+pub fn to_vname(data: &str, i: &Interner) -> VName {
+  data.split("::").map(|s| i.i(s)).collect::<Vec<_>>()
 }
 /// A little utility to step through the resolution of a macro set
-pub fn macro_debug(repo: rule::Repo, mut code: ast::Expr, i: &Interner) {
+pub fn macro_debug(repo: rule::Repo, mut code: ast::Expr<Sym>, i: &Interner) {
   let mut idx = 0;
   println!("Macro debugger working on {}", code.bundle(i));
   loop {
@@ -119,8 +125,8 @@ pub fn main() {
   args.chk_proj().unwrap_or_else(|e| panic!("{e}"));
   let dir = PathBuf::try_from(args.dir).unwrap();
   let i = Interner::new();
-  let main = to_sym(&args.main, &i);
-  let project = load_dir(&dir, main, &i);
+  let main = to_vname(&args.main, &i);
+  let project = pipeline::vname_to_sym_tree(load_dir(&dir, &main, &i), &i);
   let rules = pipeline::collect_rules(&project);
   let consts = pipeline::collect_consts(&project, &i);
   let repo = rule::Repo::new(rules, &i).unwrap_or_else(|(rule, error)| {
@@ -135,7 +141,7 @@ pub fn main() {
     println!("Parsed rules: {}", repo.bundle(&i));
     return;
   } else if !args.macro_debug.is_empty() {
-    let name = to_sym(&args.macro_debug, &i);
+    let name = i.i(&to_vname(&args.macro_debug, &i));
     let code = consts
       .get(&name)
       .unwrap_or_else(|| panic!("Constant {} not found", args.macro_debug));
@@ -153,7 +159,7 @@ pub fn main() {
   }
   let ctx =
     interpreter::Context { symbols: &exec_table, interner: &i, gas: None };
-  let entrypoint = exec_table.get(&main).unwrap_or_else(|| {
+  let entrypoint = exec_table.get(&i.i(&main)).unwrap_or_else(|| {
     let main = args.main;
     let symbols =
       exec_table.keys().map(|t| i.extern_vec(*t).join("::")).join(", ");
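
For orientation, the CLI now keeps the parsed --main path as an un-interned VName and only interns it into a Sym where a map key is needed. A minimal sketch of that round trip, assuming the Interner::i / extern_vec calls behave as they are used in the hunk above (demo is a hypothetical helper):

use orchidlang::interner::Interner;
use orchidlang::{Sym, VName};

// Mirrors to_vname from the diff above: split on "::", intern each segment.
fn to_vname(data: &str, i: &Interner) -> VName {
  data.split("::").map(|s| i.i(s)).collect()
}

fn demo() {
  let i = Interner::new();
  let main: VName = to_vname("app::main", &i); // plain Vec of segment tokens
  let sym: Sym = i.i(&main);                   // intern only where a stable key is required
  assert_eq!(i.extern_vec(sym).join("::"), "app::main");
}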

View File

@@ -2,24 +2,12 @@
 //!
 //! Can be used to deduplicate various structures for fast equality comparisons.
 //! The parser uses it to intern strings.
-mod display;
 mod monotype;
 mod multitype;
 mod token;
+mod traits;
-pub use display::{DisplayBundle, InternedDisplay};
 pub use monotype::TypedInterner;
 pub use multitype::Interner;
 pub use token::Tok;
+pub use traits::{DisplayBundle, InternedDisplay, InternedInto};
-/// A symbol, nsname, nname or namespaced name is a sequence of namespaces
-/// and an identifier. The [Vec] can never be empty.
-///
-/// Throughout different stages of processing, these names can be
-///
-/// - local names to be prefixed with the current module
-/// - imported names starting with a segment
-///   - ending a single import or
-///   - defined in one of the glob imported modules
-/// - absolute names
-pub type Sym = Tok<Vec<Tok<String>>>;

View File

@@ -54,3 +54,18 @@ impl<'a, T: InternedDisplay + ?Sized> Display for DisplayBundle<'a, T> {
     self.data.fmt_i(f, self.interner)
   }
 }
+/// Conversions that are possible in the presence of an interner
+///
+/// Essentially, this allows to define abstractions over interned and
+/// non-interned versions of a type and convert between them
+pub trait InternedInto<U> {
+  /// Execute the conversion
+  fn into_i(self, i: &Interner) -> U;
+}
+impl<T: Into<U>, U> InternedInto<U> for T {
+  fn into_i(self, _i: &Interner) -> U {
+    self.into()
+  }
+}
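
A hedged sketch of how the blanket impl above is meant to be used: a caller can stay generic over "already convertible without an interner" values and interner-aware ones. The describe/demo functions below are hypothetical; only the trait and its blanket impl come from this file:

use orchidlang::interner::{InternedInto, Interner};

// Anything with Into<String> satisfies the blanket impl, so the interner
// parameter is simply ignored for such types.
fn describe<T: InternedInto<String>>(value: T, i: &Interner) -> String {
  value.into_i(i)
}

fn demo(i: &Interner) {
  assert_eq!(describe("std::num::add", i), "std::num::add");
}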

View File

@@ -1,7 +1,8 @@
 use hashbrown::HashMap;
-use crate::interner::{Interner, Sym};
+use crate::interner::Interner;
 use crate::representations::interpreted::ExprInst;
+use crate::Sym;
 /// All the data associated with an interpreter run
 #[derive(Clone)]

View File

@@ -18,7 +18,10 @@ pub mod rule;
 pub mod stl;
 mod utils;
-pub use interner::Sym;
+use interner::Tok;
+pub use representations::{NameLike, Sym, VName};
+/// Element of VName and a common occurrence in the API
+pub type Stok = Tok<String>;
 pub use representations::ast_to_interpreted::ast_to_interpreted;
 pub use representations::{
   ast, interpreted, sourcefile, tree, Literal, Location, PathSet, Primitive,
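
The three name shapes this commit juggles, written out for reference. Sym's definition is the alias deleted from interner/mod.rs above; VName's shape is inferred from how file_loader and the parsers use it, so treat this as a hedged reading rather than the crate's literal definitions:

use orchidlang::interner::Tok;

type Stok = Tok<String>;          // one interned path segment (new in this commit)
type VName = Vec<Stok>;           // un-interned namespaced name: cheap to build, slice and rewrite
type Sym = Tok<Vec<Tok<String>>>; // the whole path interned at once: cheap to hash and compare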

View File

@@ -8,15 +8,14 @@ use super::context::Context;
 use super::decls::SimpleParser;
 use super::enum_filter::enum_filter;
 use super::lexer::{filter_map_lex, Entry, Lexeme};
-use crate::interner::Sym;
 use crate::representations::ast::{Clause, Expr};
 use crate::representations::location::Location;
-use crate::representations::Primitive;
+use crate::representations::{Primitive, VName};
 /// Parses any number of expr wrapped in (), [] or {}
 fn sexpr_parser(
-  expr: impl SimpleParser<Entry, Expr> + Clone,
-) -> impl SimpleParser<Entry, (Clause, Range<usize>)> + Clone {
+  expr: impl SimpleParser<Entry, Expr<VName>> + Clone,
+) -> impl SimpleParser<Entry, (Clause<VName>, Range<usize>)> + Clone {
   let body = expr.repeated();
   choice((
     Lexeme::LP('(').parser().then(body.clone()).then(Lexeme::RP('(').parser()),
@@ -40,9 +39,9 @@ fn sexpr_parser(
 /// and type and body are both expressions. Comments are allowed
 /// and ignored everywhere in between the tokens
 fn lambda_parser<'a>(
-  expr: impl SimpleParser<Entry, Expr> + Clone + 'a,
+  expr: impl SimpleParser<Entry, Expr<VName>> + Clone + 'a,
   ctx: impl Context + 'a,
-) -> impl SimpleParser<Entry, (Clause, Range<usize>)> + Clone + 'a {
+) -> impl SimpleParser<Entry, (Clause<VName>, Range<usize>)> + Clone + 'a {
   Lexeme::BS
     .parser()
     .ignore_then(expr.clone())
@@ -56,9 +55,8 @@ fn lambda_parser<'a>(
 /// Parses a sequence of names separated by :: <br/>
 /// Comments and line breaks are allowed and ignored in between
-pub fn ns_name_parser<'a>(
-  ctx: impl Context + 'a,
-) -> impl SimpleParser<Entry, (Sym, Range<usize>)> + Clone + 'a {
+pub fn ns_name_parser<'a>()
+-> impl SimpleParser<Entry, (VName, Range<usize>)> + Clone + 'a {
   filter_map_lex(enum_filter!(Lexeme::Name))
     .separated_by(Lexeme::NS.parser())
     .at_least(1)
@@ -66,32 +64,31 @@ pub fn ns_name_parser<'a>(
       let start = elements.first().expect("can never be empty").1.start;
       let end = elements.last().expect("can never be empty").1.end;
       let tokens = (elements.iter().map(|(t, _)| *t)).collect::<Vec<_>>();
-      (ctx.interner().i(&tokens), start..end)
+      (tokens, start..end)
     })
     .labelled("Namespaced name")
 }
-pub fn namelike_parser<'a>(
-  ctx: impl Context + 'a,
-) -> impl SimpleParser<Entry, (Clause, Range<usize>)> + Clone + 'a {
+pub fn namelike_parser<'a>()
+-> impl SimpleParser<Entry, (Clause<VName>, Range<usize>)> + Clone + 'a {
   choice((
     filter_map_lex(enum_filter!(Lexeme::PH))
       .map(|(ph, range)| (Clause::Placeh(ph), range)),
-    ns_name_parser(ctx).map(|(token, range)| (Clause::Name(token), range)),
+    ns_name_parser().map(|(token, range)| (Clause::Name(token), range)),
   ))
 }
 pub fn clause_parser<'a>(
-  expr: impl SimpleParser<Entry, Expr> + Clone + 'a,
+  expr: impl SimpleParser<Entry, Expr<VName>> + Clone + 'a,
   ctx: impl Context + 'a,
-) -> impl SimpleParser<Entry, (Clause, Range<usize>)> + Clone + 'a {
+) -> impl SimpleParser<Entry, (Clause<VName>, Range<usize>)> + Clone + 'a {
   choice((
     filter_map_lex(enum_filter!(Lexeme >> Primitive; Literal))
       .map(|(p, s)| (Clause::P(p), s))
       .labelled("Literal"),
     sexpr_parser(expr.clone()),
-    lambda_parser(expr, ctx.clone()),
-    namelike_parser(ctx),
+    lambda_parser(expr, ctx),
+    namelike_parser(),
   ))
   .labelled("Clause")
 }
@@ -99,7 +96,7 @@ pub fn clause_parser<'a>(
 /// Parse an expression
 pub fn xpr_parser<'a>(
   ctx: impl Context + 'a,
-) -> impl SimpleParser<Entry, Expr> + 'a {
+) -> impl SimpleParser<Entry, Expr<VName>> + 'a {
   recursive(move |expr| {
     clause_parser(expr, ctx.clone()).map(move |(value, range)| Expr {
       value,

View File

@@ -15,20 +15,17 @@ use super::Entry;
 use crate::ast::{Clause, Constant, Expr, Rule};
 use crate::representations::location::Location;
 use crate::representations::sourcefile::{FileEntry, Member, Namespace};
+use crate::representations::VName;
 fn rule_parser<'a>(
   ctx: impl Context + 'a,
-) -> impl SimpleParser<Entry, Rule> + 'a {
+) -> impl SimpleParser<Entry, Rule<VName>> + 'a {
   xpr_parser(ctx.clone())
     .repeated()
     .at_least(1)
     .then(filter_map_lex(enum_filter!(Lexeme::Rule)))
     .then(xpr_parser(ctx).repeated().at_least(1))
-    .map(|((p, (prio, _)), t)| Rule {
-      pattern: Rc::new(p),
-      prio,
-      template: Rc::new(t),
-    })
+    .map(|((p, (prio, _)), t)| Rule { pattern: p, prio, template: t })
     .labelled("Rule")
 }

View File

@@ -6,12 +6,11 @@ use std::{fs, io};
 use chumsky::text::Character;
 use rust_embed::RustEmbed;
-use crate::interner::{Interner, Sym};
-use crate::pipeline::error::{
-  ErrorPosition, ProjectError, UnexpectedDirectory,
-};
+use crate::interner::Interner;
+use crate::pipeline::error::{ErrorPosition, ProjectError};
 use crate::utils::iter::box_once;
 use crate::utils::{BoxedIter, Cache};
+use crate::{Stok, VName};
 /// All the data available about a failed source load call
 #[derive(Debug)]
@@ -89,9 +88,9 @@ pub fn load_file(root: &Path, path: &[impl AsRef<str>]) -> IOResult {
 }
 /// Generates a cached file loader for a directory
-pub fn mk_dir_cache(root: PathBuf, i: &Interner) -> Cache<Sym, IOResult> {
-  Cache::new(move |token: Sym, _this| -> IOResult {
-    let path = i.r(token).iter().map(|t| i.r(*t).as_str()).collect::<Vec<_>>();
+pub fn mk_dir_cache(root: PathBuf, i: &Interner) -> Cache<VName, IOResult> {
+  Cache::new(move |vname: VName, _this| -> IOResult {
+    let path = vname.iter().map(|t| i.r(*t).as_str()).collect::<Vec<_>>();
     load_file(&root, &path)
   })
 }
@@ -128,28 +127,9 @@ pub fn load_embed<T: 'static + RustEmbed>(path: &str, ext: &str) -> IOResult {
 pub fn mk_embed_cache<'a, T: 'static + RustEmbed>(
   ext: &'a str,
   i: &'a Interner,
-) -> Cache<'a, Sym, IOResult> {
-  Cache::new(move |token: Sym, _this| -> IOResult {
-    let path = i.extern_vec(token).join("/");
+) -> Cache<'a, Vec<Stok>, IOResult> {
+  Cache::new(move |vname: VName, _this| -> IOResult {
+    let path = i.extern_all(&vname).join("/");
     load_embed::<T>(&path, ext)
   })
 }
-/// Loads the string contents of a file at the given location.
-/// If the path points to a directory, raises an error.
-pub fn load_text(
-  path: Sym,
-  load_file: &impl Fn(Sym) -> IOResult,
-  i: &Interner,
-) -> Result<Rc<String>, Rc<dyn ProjectError>> {
-  if let Loaded::Code(s) = load_file(path)? {
-    Ok(s)
-  } else {
-    Err(
-      UnexpectedDirectory {
-        path: i.r(path).iter().map(|t| i.r(*t)).cloned().collect(),
-      }
-      .rc(),
-    )
-  }
-}

View File

@@ -2,22 +2,22 @@ use std::hash::Hash;
 use hashbrown::{HashMap, HashSet};
-use crate::interner::Sym;
+use crate::interner::Tok;
 #[derive(Clone, Debug, Default)]
 pub struct AliasMap {
-  pub targets: HashMap<Sym, Sym>,
-  pub aliases: HashMap<Sym, HashSet<Sym>>,
+  pub targets: HashMap<Vec<Tok<String>>, Vec<Tok<String>>>,
+  pub aliases: HashMap<Vec<Tok<String>>, HashSet<Vec<Tok<String>>>>,
 }
 impl AliasMap {
   pub fn new() -> Self {
     Self::default()
   }
-  pub fn link(&mut self, alias: Sym, target: Sym) {
-    let prev = self.targets.insert(alias, target);
+  pub fn link(&mut self, alias: Vec<Tok<String>>, target: Vec<Tok<String>>) {
+    let prev = self.targets.insert(alias.clone(), target.clone());
     debug_assert!(prev.is_none(), "Alias already has a target");
-    multimap_entry(&mut self.aliases, &target).insert(alias);
+    multimap_entry(&mut self.aliases, &target).insert(alias.clone());
     // Remove aliases of the alias
     if let Some(alts) = self.aliases.remove(&alias) {
       for alt in alts {
@@ -26,17 +26,18 @@ impl AliasMap {
           self.aliases.get(&alt).map(HashSet::is_empty).unwrap_or(true),
           "Alias set of alias not empty"
         );
+        let alt_target = self.targets.insert(alt.clone(), target.clone());
         debug_assert!(
-          self.targets.insert(alt, target) == Some(alias),
+          alt_target.as_ref() == Some(&alias),
           "Name not target of its own alias"
         );
-        multimap_entry(&mut self.aliases, &target).insert(alt);
+        multimap_entry(&mut self.aliases, &alias).insert(alt);
       }
     }
   }
-  pub fn resolve(&self, alias: Sym) -> Option<Sym> {
-    self.targets.get(&alias).copied()
+  pub fn resolve(&self, alias: &[Tok<String>]) -> Option<&Vec<Tok<String>>> {
+    self.targets.get(alias)
   }
 }
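
A hedged usage sketch of the re-keyed map. AliasMap lives in a private pipeline submodule, so this only illustrates in-crate use; Tok's Eq/Debug impls are assumed from its use as a hash-map key here:

fn demo(i: &Interner) {
  let mut map = AliasMap::new();
  let alias = vec![i.i("prelude"), i.i("add")];
  let target = vec![i.i("std"), i.i("num"), i.i("add")];
  map.link(alias.clone(), target.clone());
  // resolve now borrows a slice and returns a borrowed name
  // instead of copying interned Syms around.
  assert_eq!(map.resolve(&alias), Some(&target));
}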

View File

@@ -1,24 +1,22 @@
-use std::rc::Rc;
 use hashbrown::HashMap;
 use super::alias_map::AliasMap;
 use super::decls::{InjectedAsFn, UpdatedFn};
 use crate::ast::{Expr, Rule};
-use crate::interner::{Interner, Sym, Tok};
+use crate::interner::Tok;
 use crate::pipeline::{ProjectExt, ProjectModule};
 use crate::representations::tree::{ModEntry, ModMember};
+use crate::representations::VName;
 use crate::utils::Substack;
 fn resolve_rec(
-  token: Sym,
+  namespace: &[Tok<String>],
   alias_map: &AliasMap,
-  i: &Interner,
 ) -> Option<Vec<Tok<String>>> {
-  if let Some(alias) = alias_map.resolve(token) {
-    Some(i.r(alias).clone())
-  } else if let Some((foot, body)) = i.r(token).split_last() {
-    let mut new_beginning = resolve_rec(i.i(body), alias_map, i)?;
+  if let Some(alias) = alias_map.resolve(namespace) {
+    Some(alias.clone())
+  } else if let Some((foot, body)) = namespace.split_last() {
+    let mut new_beginning = resolve_rec(body, alias_map)?;
     new_beginning.push(*foot);
     Some(new_beginning)
   } else {
@@ -27,25 +25,23 @@ fn resolve_rec(
 }
 fn resolve(
-  token: Sym,
+  namespace: &[Tok<String>],
   alias_map: &AliasMap,
   injected_as: &impl InjectedAsFn,
-  i: &Interner,
-) -> Option<Sym> {
-  injected_as(&i.r(token)[..]).or_else(|| {
-    let next_v = resolve_rec(token, alias_map, i)?;
-    Some(injected_as(&next_v).unwrap_or_else(|| i.i(&next_v)))
+) -> Option<Vec<Tok<String>>> {
+  injected_as(namespace).or_else(|| {
+    let next_v = resolve_rec(namespace, alias_map)?;
+    Some(injected_as(&next_v).unwrap_or(next_v))
   })
 }
 fn process_expr(
-  expr: &Expr,
+  expr: &Expr<VName>,
   alias_map: &AliasMap,
   injected_as: &impl InjectedAsFn,
-  i: &Interner,
-) -> Expr {
+) -> Expr<VName> {
   expr
-    .map_names(&|n| resolve(n, alias_map, injected_as, i))
+    .map_names(&|n| resolve(n, alias_map, injected_as))
     .unwrap_or_else(|| expr.clone())
 }
@@ -53,32 +49,23 @@ fn process_expr(
 /// Replace all aliases with the name they're originally defined as
 fn apply_aliases_rec(
   path: Substack<Tok<String>>,
-  module: &ProjectModule,
+  module: &ProjectModule<VName>,
   alias_map: &AliasMap,
-  i: &Interner,
   injected_as: &impl InjectedAsFn,
   updated: &impl UpdatedFn,
-) -> ProjectModule {
+) -> ProjectModule<VName> {
   let items = (module.items.iter())
     .map(|(name, ent)| {
       let ModEntry { exported, member } = ent;
       let member = match member {
         ModMember::Item(expr) =>
-          ModMember::Item(process_expr(expr, alias_map, injected_as, i)),
+          ModMember::Item(process_expr(expr, alias_map, injected_as)),
         ModMember::Sub(module) => {
           let subpath = path.push(*name);
           let new_mod = if !updated(&subpath.iter().rev_vec_clone()) {
             module.clone()
           } else {
-            let module = module.as_ref();
-            Rc::new(apply_aliases_rec(
-              subpath,
-              module,
-              alias_map,
-              i,
-              injected_as,
-              updated,
-            ))
+            apply_aliases_rec(subpath, module, alias_map, injected_as, updated)
           };
           ModMember::Sub(new_mod)
         },
@@ -91,16 +78,12 @@ fn apply_aliases_rec(
       let Rule { pattern, prio, template } = rule;
       Rule {
         prio: *prio,
-        pattern: Rc::new(
-          (pattern.iter())
-            .map(|expr| process_expr(expr, alias_map, injected_as, i))
-            .collect::<Vec<_>>(),
-        ),
-        template: Rc::new(
-          (template.iter())
-            .map(|expr| process_expr(expr, alias_map, injected_as, i))
-            .collect::<Vec<_>>(),
-        ),
+        pattern: (pattern.iter())
+          .map(|expr| process_expr(expr, alias_map, injected_as))
+          .collect::<Vec<_>>(),
+        template: (template.iter())
+          .map(|expr| process_expr(expr, alias_map, injected_as))
+          .collect::<Vec<_>>(),
       }
     })
     .collect::<Vec<_>>();
@@ -111,7 +94,7 @@ fn apply_aliases_rec(
     rules,
     exports: (module.extra.exports.iter())
       .map(|(k, v)| {
-        (*k, resolve(*v, alias_map, injected_as, i).unwrap_or(*v))
+        (*k, resolve(v, alias_map, injected_as).unwrap_or(v.clone()))
       })
      .collect(),
     file: module.extra.file.clone(),
@@ -121,18 +104,10 @@ fn apply_aliases_rec(
 }
 pub fn apply_aliases(
-  module: &ProjectModule,
+  module: &ProjectModule<VName>,
   alias_map: &AliasMap,
-  i: &Interner,
   injected_as: &impl InjectedAsFn,
   updated: &impl UpdatedFn,
-) -> ProjectModule {
-  apply_aliases_rec(
-    Substack::Bottom,
-    module,
-    alias_map,
-    i,
-    injected_as,
-    updated,
-  )
+) -> ProjectModule<VName> {
+  apply_aliases_rec(Substack::Bottom, module, alias_map, injected_as, updated)
 }

View File

@@ -7,21 +7,23 @@ use crate::interner::{Interner, Tok};
 use crate::pipeline::error::{NotExported, ProjectError};
 use crate::pipeline::project_tree::{split_path, ProjectModule, ProjectTree};
 use crate::representations::tree::{ModMember, WalkErrorKind};
+use crate::representations::VName;
 use crate::utils::{pushed, unwrap_or, Substack};
 /// Assert that a module identified by a path can see a given symbol
 fn assert_visible(
   source: &[Tok<String>], // must point to a file or submodule
   target: &[Tok<String>], // may point to a symbol or module of any kind
-  project: &ProjectTree,
+  project: &ProjectTree<VName>,
   i: &Interner,
 ) -> Result<(), Rc<dyn ProjectError>> {
   let (tgt_item, tgt_path) = unwrap_or!(target.split_last(); return Ok(()));
   let shared_len =
     source.iter().zip(tgt_path.iter()).take_while(|(a, b)| a == b).count();
   let vis_ignored_len = usize::min(tgt_path.len(), shared_len + 1);
-  let private_root =
-    (project.0).walk(&tgt_path[..vis_ignored_len], false).unwrap_or_else(|e| {
+  let private_root = (project.0)
+    .walk_ref(&tgt_path[..vis_ignored_len], false)
+    .unwrap_or_else(|e| {
       let path_slc = &tgt_path[..vis_ignored_len];
       let bad_path = i.extern_all(path_slc).join("::");
       eprintln!(
@@ -32,7 +34,7 @@ fn assert_visible(
       panic!("")
     });
   let direct_parent = private_root
-    .walk(&tgt_path[vis_ignored_len..], true)
+    .walk_ref(&tgt_path[vis_ignored_len..], true)
     .map_err(|e| match e.kind {
       WalkErrorKind::Missing => panic!("checked in parsing"),
       WalkErrorKind::Private => {
@@ -70,8 +72,8 @@ fn assert_visible(
 /// Populate target and alias maps from the module tree recursively
 fn collect_aliases_rec(
   path: Substack<Tok<String>>,
-  module: &ProjectModule,
-  project: &ProjectTree,
+  module: &ProjectModule<VName>,
+  project: &ProjectTree<VName>,
   alias_map: &mut AliasMap,
   i: &Interner,
   updated: &impl UpdatedFn,
@@ -81,31 +83,29 @@ fn collect_aliases_rec(
   if !updated(&mod_path_v) {
     return Ok(());
   };
-  for (&name, &target_mod_name) in module.extra.imports_from.iter() {
-    let target_mod_v = i.r(target_mod_name);
-    let target_sym_v = pushed(target_mod_v, name);
+  for (&name, target_mod_name) in module.extra.imports_from.iter() {
+    let target_sym_v = pushed(target_mod_name, name);
     assert_visible(&mod_path_v, &target_sym_v, project, i)?;
     let sym_path_v = pushed(&mod_path_v, name);
-    let sym_path = i.i(&sym_path_v);
-    let target_mod = (project.0.walk(target_mod_v, false))
+    let target_mod = (project.0.walk_ref(target_mod_name, false))
       .expect("checked above in assert_visible");
-    let target_sym =
-      *target_mod.extra.exports.get(&name).unwrap_or_else(|| {
+    let target_sym = target_mod
+      .extra
+      .exports
+      .get(&name)
+      .unwrap_or_else(|| {
        panic!(
          "error in {}, {} has no member {}",
          i.extern_all(&mod_path_v).join("::"),
-          i.extern_all(target_mod_v).join("::"),
+          i.extern_all(target_mod_name).join("::"),
          i.r(name)
        )
-      });
-    alias_map.link(sym_path, target_sym);
+      })
+      .clone();
+    alias_map.link(sym_path_v, target_sym);
   }
   for (&name, entry) in module.items.iter() {
-    let submodule = if let ModMember::Sub(s) = &entry.member {
-      s.as_ref()
-    } else {
-      continue;
-    };
+    let submodule = unwrap_or!(&entry.member => ModMember::Sub; continue);
     collect_aliases_rec(
       path.push(name),
       submodule,
@@ -120,8 +120,8 @@ fn collect_aliases_rec(
 /// Populate target and alias maps from the module tree
 pub fn collect_aliases(
-  module: &ProjectModule,
-  project: &ProjectTree,
+  module: &ProjectModule<VName>,
+  project: &ProjectTree<VName>,
   alias_map: &mut AliasMap,
   i: &Interner,
   updated: &impl UpdatedFn,

View File

@@ -1,8 +1,8 @@
 use trait_set::trait_set;
-use crate::interner::{Sym, Tok};
+use crate::interner::Tok;
 trait_set! {
-  pub trait InjectedAsFn = Fn(&[Tok<String>]) -> Option<Sym>;
+  pub trait InjectedAsFn = Fn(&[Tok<String>]) -> Option<Vec<Tok<String>>>;
   pub trait UpdatedFn = Fn(&[Tok<String>]) -> bool;
 }

View File

@@ -7,18 +7,18 @@ use super::decls::{InjectedAsFn, UpdatedFn};
 use crate::interner::Interner;
 use crate::pipeline::error::ProjectError;
 use crate::pipeline::project_tree::ProjectTree;
+use crate::representations::VName;
 /// Follow import chains to locate the original name of all tokens, then
 /// replace these aliases with the original names throughout the tree
 pub fn resolve_imports(
-  project: ProjectTree,
+  project: ProjectTree<VName>,
   i: &Interner,
   injected_as: &impl InjectedAsFn,
   updated: &impl UpdatedFn,
-) -> Result<ProjectTree, Rc<dyn ProjectError>> {
+) -> Result<ProjectTree<VName>, Rc<dyn ProjectError>> {
   let mut map = AliasMap::new();
-  collect_aliases(project.0.as_ref(), &project, &mut map, i, updated)?;
-  let new_mod =
-    apply_aliases(project.0.as_ref(), &map, i, injected_as, updated);
-  Ok(ProjectTree(Rc::new(new_mod)))
+  collect_aliases(&project.0, &project, &mut map, i, updated)?;
+  let new_mod = apply_aliases(&project.0, &map, injected_as, updated);
+  Ok(ProjectTree(new_mod))
 }

View File

@@ -9,6 +9,6 @@ mod source_loader;
 pub use parse_layer::parse_layer;
 pub use project_tree::{
-  collect_consts, collect_rules, from_const_tree, ConstTree, ProjectExt,
-  ProjectModule, ProjectTree,
+  collect_consts, collect_rules, from_const_tree, vname_to_sym_tree, ConstTree,
+  ProjectExt, ProjectModule, ProjectTree,
 };

View File

@@ -3,8 +3,9 @@ use std::rc::Rc;
 use super::error::ProjectError;
 use super::file_loader::IOResult;
 use super::{import_resolution, project_tree, source_loader, ProjectTree};
-use crate::interner::{Interner, Sym, Tok};
+use crate::interner::{Interner, Tok};
 use crate::representations::sourcefile::FileEntry;
+use crate::representations::VName;
 /// Using an IO callback, produce a project tree that includes the given
 /// target symbols or files if they're defined.
@@ -14,35 +15,32 @@ use crate::representations::sourcefile::FileEntry;
 /// prelude which will be prepended to each individual file. Since the
 /// prelude gets compiled with each file, normally it should be a glob
 /// import pointing to a module in the environment.
-pub fn parse_layer(
-  targets: &[Sym],
-  loader: &impl Fn(Sym) -> IOResult,
-  environment: &ProjectTree,
+pub fn parse_layer<'a>(
+  targets: impl Iterator<Item = &'a [Tok<String>]>,
+  loader: &impl Fn(&[Tok<String>]) -> IOResult,
+  environment: &'a ProjectTree<VName>,
   prelude: &[FileEntry],
   i: &Interner,
-) -> Result<ProjectTree, Rc<dyn ProjectError>> {
+) -> Result<ProjectTree<VName>, Rc<dyn ProjectError>> {
   // A path is injected if it is walkable in the injected tree
   let injected_as = |path: &[Tok<String>]| {
     let (item, modpath) = path.split_last()?;
-    let module = environment.0.walk(modpath, false).ok()?;
-    let inj = module.extra.exports.get(item).copied()?;
-    Some(inj)
+    let module = environment.0.walk_ref(modpath, false).ok()?;
+    module.extra.exports.get(item).cloned()
   };
   let injected_names = |path: Tok<Vec<Tok<String>>>| {
-    let module = environment.0.walk(&i.r(path)[..], false).ok()?;
+    let module = environment.0.walk_ref(&i.r(path)[..], false).ok()?;
     Some(Rc::new(module.extra.exports.keys().copied().collect()))
   };
   let source =
     source_loader::load_source(targets, prelude, i, loader, &|path| {
-      environment.0.walk(&i.r(path)[..], false).is_ok()
+      environment.0.walk_ref(path, false).is_ok()
     })?;
   let tree = project_tree::build_tree(source, i, prelude, &injected_names)?;
-  let sum = ProjectTree(Rc::new(
-    environment.0.as_ref().clone().overlay(tree.0.as_ref().clone()),
-  ));
+  let sum = ProjectTree(environment.0.clone().overlay(tree.0.clone()));
   let resolvd =
     import_resolution::resolve_imports(sum, i, &injected_as, &|path| {
-      tree.0.walk(path, false).is_ok()
+      tree.0.walk_ref(path, false).is_ok()
     })?;
   // Addition among modules favours the left hand side.
   Ok(resolvd)
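
For comparison with the old &[Sym] interface, a hedged sketch of calling the new parse_layer, mirroring the call in the CLI's main.rs at the top of this commit (load_one and its empty prelude are stand-ins):

use std::iter;
use orchidlang::interner::Interner;
use orchidlang::pipeline::file_loader::IOResult;
use orchidlang::pipeline::{parse_layer, ProjectTree};
use orchidlang::{Stok, VName};

fn load_one(
  i: &Interner,
  env: &ProjectTree<VName>,
  loader: &impl Fn(&[Stok]) -> IOResult,
) {
  let target: VName = vec![i.i("app"), i.i("main")];
  // `targets` is now an iterator of borrowed segment slices instead of a &[Sym].
  let _tree = parse_layer(iter::once(&target[..]), loader, env, &[], i)
    .expect("Failed to load source code");
}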

View File

@@ -12,8 +12,9 @@ use crate::pipeline::error::ProjectError;
 use crate::pipeline::source_loader::{LoadedSource, LoadedSourceTable};
 use crate::representations::sourcefile::{absolute_path, FileEntry, Member};
 use crate::representations::tree::{ModEntry, ModMember, Module};
+use crate::representations::{NameLike, VName};
 use crate::utils::iter::{box_empty, box_once};
-use crate::utils::{pushed, Substack};
+use crate::utils::{pushed, unwrap_or, Substack};
 #[derive(Debug)]
 struct ParsedSource<'a> {
@@ -24,7 +25,7 @@ struct ParsedSource<'a> {
 pub fn split_path<'a>(
   path: &'a [Tok<String>],
-  proj: &'a ProjectTree,
+  proj: &'a ProjectTree<impl NameLike>,
 ) -> (&'a [Tok<String>], &'a [Tok<String>]) {
   let (end, body) = if let Some(s) = path.split_last() {
     s
@@ -32,9 +33,9 @@ pub fn split_path<'a>(
     return (&[], &[]);
   };
   let mut module =
-    proj.0.walk(body, false).expect("invalid path cannot be split");
+    proj.0.walk_ref(body, false).expect("invalid path cannot be split");
   if let ModMember::Sub(m) = &module.items[end].member {
-    module = m.clone();
+    module = m;
   }
   let file =
     module.extra.file.as_ref().map(|s| &path[..s.len()]).unwrap_or(path);
@@ -52,7 +53,7 @@ fn source_to_module(
   // context
   i: &Interner,
   filepath_len: usize,
-) -> Rc<Module<Expr, ProjectExt>> {
+) -> Module<Expr<VName>, ProjectExt<VName>> {
   let path_v = path.iter().rev_vec_clone();
   let imports = data
     .iter()
@@ -70,13 +71,13 @@ fn source_to_module(
       let mut abs_path =
         absolute_path(&path_v, &imp_path_v, i).expect("tested in preparsing");
       let name = abs_path.pop().expect("importing the global context");
-      (name, i.i(&abs_path))
+      (name, abs_path)
     })
     .collect::<HashMap<_, _>>();
   let exports = data
     .iter()
     .flat_map(|ent| {
-      let mk_ent = |name| (name, i.i(&pushed(&path_v, name)));
+      let mk_ent = |name| (name, pushed(&path_v, name));
       match ent {
         FileEntry::Export(names) => Box::new(names.iter().copied().map(mk_ent)),
         FileEntry::Exported(mem) => match mem {
@@ -86,8 +87,8 @@ fn source_to_module(
            let mut names = Vec::new();
            for e in rule.pattern.iter() {
              e.visit_names(Substack::Bottom, &mut |n| {
-                if let Some([name]) = i.r(n).strip_prefix(&path_v[..]) {
-                  names.push((*name, n))
+                if let Some([name]) = n.strip_prefix(&path_v[..]) {
+                  names.push((*name, n.clone()))
                }
              })
            }
@@ -109,14 +110,13 @@ fn source_to_module(
     .collect::<Vec<_>>();
   let items = data
     .into_iter()
-    .filter_map(|ent| match ent {
-      FileEntry::Exported(Member::Namespace(ns)) => {
-        let prep_member = &preparsed.items[&ns.name].member;
-        let new_prep = if let ModMember::Sub(s) = prep_member {
-          s.as_ref()
-        } else {
-          panic!("preparsed missing a submodule")
-        };
-        let module = source_to_module(
-          path.push(ns.name),
-          new_prep,
+    .filter_map(|ent| {
+      let member_to_item = |exported, member| match member {
+        Member::Namespace(ns) => {
+          let new_prep = unwrap_or!(
+            &preparsed.items[&ns.name].member => ModMember::Sub;
+            panic!("preparsed missing a submodule")
+          );
+          let module = source_to_module(
+            path.push(ns.name),
+            new_prep,
@@ -125,42 +125,22 @@ fn source_to_module(
           filepath_len,
         );
         let member = ModMember::Sub(module);
-        Some((ns.name, ModEntry { exported: true, member }))
-      },
-      FileEntry::Internal(Member::Namespace(ns)) => {
-        let prep_member = &preparsed.items[&ns.name].member;
-        let new_prep = if let ModMember::Sub(s) = prep_member {
-          s.as_ref()
-        } else {
-          panic!("preparsed missing a submodule")
-        };
-        let module = source_to_module(
-          path.push(ns.name),
-          new_prep,
-          ns.body,
-          i,
-          filepath_len,
-        );
-        let member = ModMember::Sub(module);
-        Some((ns.name, ModEntry { exported: false, member }))
-      },
-      FileEntry::Exported(Member::Constant(Constant { name, value })) => {
-        let member = ModMember::Item(value);
-        Some((name, ModEntry { exported: true, member }))
-      },
-      FileEntry::Internal(Member::Constant(Constant { name, value })) => {
-        let member = ModMember::Item(value);
-        Some((name, ModEntry { exported: false, member }))
-      },
-      _ => None,
+          Some((ns.name, ModEntry { exported, member }))
+        },
+        Member::Constant(Constant { name, value }) => {
+          let member = ModMember::Item(value);
+          Some((name, ModEntry { exported, member }))
+        },
+        _ => None,
+      };
+      match ent {
+        FileEntry::Exported(member) => member_to_item(true, member),
+        FileEntry::Internal(member) => member_to_item(false, member),
+        _ => None,
+      }
     })
     .collect::<HashMap<_, _>>();
-  // println!(
-  //   "Constructing file-module {} with members ({})",
-  //   i.extern_all(&path_v[..]).join("::"),
-  //   exports.keys().map(|t| i.r(*t)).join(", ")
-  // );
-  Rc::new(Module {
+  Module {
     imports,
     items,
     extra: ProjectExt {
@@ -169,14 +149,14 @@ fn source_to_module(
       rules,
       file: Some(path_v[..filepath_len].to_vec()),
     },
-  })
+  }
 }
 fn files_to_module(
   path: Substack<Tok<String>>,
   files: Vec<ParsedSource>,
   i: &Interner,
-) -> Rc<Module<Expr, ProjectExt>> {
+) -> Module<Expr<VName>, ProjectExt<VName>> {
   let lvl = path.len();
   debug_assert!(
     files.iter().map(|f| f.path.len()).max().unwrap() >= lvl,
@@ -186,7 +166,7 @@ fn files_to_module(
   if files.len() == 1 && files[0].path.len() == lvl {
     return source_to_module(
       path,
-      files[0].loaded.preparsed.0.as_ref(),
+      &files[0].loaded.preparsed.0,
       files[0].parsed.clone(),
       i,
       path.len(),
@@ -204,12 +184,9 @@ fn files_to_module(
       (namespace, ModEntry { exported: true, member })
     })
     .collect::<HashMap<_, _>>();
-  let exports: HashMap<_, _> = items
-    .keys()
-    .copied()
-    .map(|name| (name, i.i(&pushed(&path_v, name))))
-    .collect();
-  Rc::new(Module {
+  let exports: HashMap<_, _> =
+    items.keys().copied().map(|name| (name, pushed(&path_v, name))).collect();
+  Module {
     items,
     imports: vec![],
     extra: ProjectExt {
@@ -218,7 +195,7 @@ fn files_to_module(
       rules: vec![],
       file: None,
     },
-  })
+  }
 }
 pub fn build_tree(
@@ -226,17 +203,13 @@ pub fn build_tree(
   i: &Interner,
   prelude: &[FileEntry],
   injected: &impl InjectedOperatorsFn,
-) -> Result<ProjectTree, Rc<dyn ProjectError>> {
+) -> Result<ProjectTree<VName>, Rc<dyn ProjectError>> {
   assert!(!files.is_empty(), "A tree requires at least one module");
   let ops_cache = collect_ops::mk_cache(&files, i, injected);
   let mut entries = files
     .iter()
     .map(|(path, loaded)| {
-      Ok((
-        i.r(*path),
-        loaded,
-        parse_file(*path, &files, &ops_cache, i, prelude)?,
-      ))
+      Ok((path, loaded, parse_file(path, &files, &ops_cache, i, prelude)?))
     })
     .collect::<Result<Vec<_>, Rc<dyn ProjectError>>>()?;
   // sort by similarity, then longest-first

View File

@@ -3,11 +3,12 @@ use std::rc::Rc;
 use hashbrown::HashSet;
 use trait_set::trait_set;
-use crate::interner::{Interner, Sym, Tok};
+use crate::interner::{Interner, Tok};
 use crate::pipeline::error::{ModuleNotFound, ProjectError};
 use crate::pipeline::source_loader::LoadedSourceTable;
 use crate::representations::tree::WalkErrorKind;
 use crate::utils::{split_max_prefix, unwrap_or, Cache};
+use crate::Sym;
 pub type OpsResult = Result<Rc<HashSet<Tok<String>>>, Rc<dyn ProjectError>>;
 pub type ExportedOpsCache<'a> = Cache<'a, Sym, OpsResult>;
@@ -31,16 +32,13 @@ pub fn collect_exported_ops(
   injected: &impl InjectedOperatorsFn,
 ) -> OpsResult {
   let injected = injected(path).unwrap_or_else(|| Rc::new(HashSet::new()));
-  let is_file = |n: &[Tok<String>]| loaded.contains_key(&i.i(n));
   let path_s = &i.r(path)[..];
-  let name_split = split_max_prefix(path_s, &is_file);
-  let (fpath_v, subpath_v) = unwrap_or!(name_split; return Ok(Rc::new(
+  let name_split = split_max_prefix(path_s, &|n| loaded.contains_key(n));
+  let (fpath, subpath) = unwrap_or!(name_split; return Ok(Rc::new(
     (loaded.keys())
-      .copied()
       .filter_map(|modname| {
-        let modname_s = i.r(modname);
-        if path_s.len() == coprefix(path_s.iter(), modname_s.iter()) {
-          Some(modname_s[path_s.len()])
+        if path_s.len() == coprefix(path_s.iter(), modname.iter()) {
+          Some(modname[path_s.len()])
        } else {
          None
        }
@@ -48,24 +46,24 @@ pub fn collect_exported_ops(
       .chain(injected.iter().copied())
      .collect::<HashSet<_>>(),
   )));
-  let fpath = i.i(fpath_v);
-  let preparsed = &loaded[&fpath].preparsed;
-  let module = preparsed.0.walk(subpath_v, false).map_err(|walk_err| {
-    match walk_err.kind {
+  let preparsed = &loaded[fpath].preparsed;
+  let module =
+    preparsed.0.walk_ref(subpath, false).map_err(
+      |walk_err| match walk_err.kind {
        WalkErrorKind::Private => {
          unreachable!("visibility is not being checked here")
        },
        WalkErrorKind::Missing => ModuleNotFound {
-          file: i.extern_vec(fpath),
-          subpath: (subpath_v.iter())
+          file: i.extern_all(fpath),
+          subpath: (subpath.iter())
            .take(walk_err.pos)
            .map(|t| i.r(*t))
            .cloned()
            .collect(),
        }
        .rc(),
-    }
-  })?;
+      },
+    )?;
   let out = (module.items.iter())
     .filter(|(_, v)| v.exported)
     .map(|(k, _)| *k)

View File

@@ -19,7 +19,7 @@ fn tree_all_ops(
   ops.extend(module.items.keys().copied());
   for ent in module.items.values() {
     if let ModMember::Sub(m) = &ent.member {
-      tree_all_ops(m.as_ref(), ops);
+      tree_all_ops(m, ops);
     }
   }
 }
@@ -31,9 +31,9 @@ pub fn collect_ops_for(
   ops_cache: &ExportedOpsCache,
   i: &Interner,
 ) -> OpsResult {
-  let tree = &loaded[&i.i(file)].preparsed.0;
+  let tree = &loaded[file].preparsed.0;
   let mut ret = HashSet::new();
-  tree_all_ops(tree.as_ref(), &mut ret);
+  tree_all_ops(tree, &mut ret);
   tree.visit_all_imports(&mut |modpath, _module, import| {
     if let Some(n) = import.name {
       ret.insert(n);

View File

@@ -1,15 +1,14 @@
 use std::ops::Add;
-use std::rc::Rc;
 use hashbrown::HashMap;
 use super::{ProjectExt, ProjectModule, ProjectTree};
 use crate::ast::{Clause, Expr};
 use crate::foreign::{Atom, Atomic, ExternFn};
-use crate::interner::{Interner, Tok};
+use crate::interner::Tok;
 use crate::representations::location::Location;
 use crate::representations::tree::{ModEntry, ModMember, Module};
-use crate::representations::Primitive;
+use crate::representations::{Primitive, VName};
 use crate::utils::{pushed, Substack};
 /// A lightweight module tree that can be built declaratively by hand to
@@ -17,7 +16,7 @@ use crate::utils::{pushed, Substack};
 /// added convenience
 pub enum ConstTree {
   /// A function or constant
-  Const(Expr),
+  Const(Expr<VName>),
   /// A submodule
   Tree(HashMap<Tok<String>, ConstTree>),
 }
@@ -67,8 +66,7 @@ fn from_const_tree_rec(
   path: Substack<Tok<String>>,
   consts: HashMap<Tok<String>, ConstTree>,
   file: &[Tok<String>],
-  i: &Interner,
-) -> ProjectModule {
+) -> ProjectModule<VName> {
   let mut items = HashMap::new();
   let path_v = path.iter().rev_vec_clone();
   for (name, item) in consts {
@@ -76,17 +74,13 @@ fn from_const_tree_rec(
       exported: true,
       member: match item {
         ConstTree::Const(c) => ModMember::Item(c),
-        ConstTree::Tree(t) => ModMember::Sub(Rc::new(from_const_tree_rec(
-          path.push(name),
-          t,
-          file,
-          i,
-        ))),
+        ConstTree::Tree(t) =>
+          ModMember::Sub(from_const_tree_rec(path.push(name), t, file)),
       },
     });
   }
   let exports =
-    items.keys().map(|name| (*name, i.i(&pushed(&path_v, *name)))).collect();
+    items.keys().map(|name| (*name, pushed(&path_v, *name))).collect();
   Module {
     items,
     imports: vec![],
@@ -103,8 +97,7 @@ fn from_const_tree_rec(
 pub fn from_const_tree(
   consts: HashMap<Tok<String>, ConstTree>,
   file: &[Tok<String>],
-  i: &Interner,
-) -> ProjectTree {
-  let module = from_const_tree_rec(Substack::Bottom, consts, file, i);
-  ProjectTree(Rc::new(module))
+) -> ProjectTree<VName> {
+  let module = from_const_tree_rec(Substack::Bottom, consts, file);
+  ProjectTree(module)
 }
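
A hedged sketch of the simplified constructor, which no longer threads the interner through just to intern export paths. The `answer` constant is a stand-in; building an Expr<VName> for it is out of scope here:

use hashbrown::HashMap;
use orchidlang::interner::Interner;
use orchidlang::pipeline::{from_const_tree, ConstTree};

fn demo(i: &Interner, answer: ConstTree) {
  let mut consts = HashMap::new();
  consts.insert(i.i("answer"), answer);
  let file = vec![i.i("demo")];
  // The interner argument is gone; exports are built as plain VNames.
  let _tree = from_const_tree(consts, &file);
}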

View File

@@ -26,5 +26,6 @@ pub use build_tree::{build_tree, split_path};
 pub use collect_ops::InjectedOperatorsFn;
 pub use const_tree::{from_const_tree, ConstTree};
 pub use tree::{
-  collect_consts, collect_rules, ProjectExt, ProjectModule, ProjectTree,
+  collect_consts, collect_rules, vname_to_sym_tree, ProjectExt, ProjectModule,
+  ProjectTree,
 };

View File

@@ -6,7 +6,7 @@ use crate::representations::sourcefile::{
 };
 use crate::representations::tree::{ModMember, Module};
 use crate::utils::iter::box_once;
-use crate::utils::{BoxedIter, Substack};
+use crate::utils::{unwrap_or, BoxedIter, Substack};
 fn member_rec(
   // level
@@ -21,20 +21,12 @@ fn member_rec(
 ) -> Member {
   match member {
     Member::Namespace(Namespace { name, body }) => {
-      let prepmember = &preparsed.items[&name].member;
-      let subprep = if let ModMember::Sub(m) = prepmember {
-        m.clone()
-      } else {
-        unreachable!("This name must point to a namespace")
-      };
-      let new_body = entv_rec(
-        mod_stack.push(name),
-        subprep.as_ref(),
-        body,
-        path,
-        ops_cache,
-        i,
-      );
+      let subprep = unwrap_or!(
+        &preparsed.items[&name].member => ModMember::Sub;
+        unreachable!("This name must point to a namespace")
+      );
+      let new_body =
+        entv_rec(mod_stack.push(name), subprep, body, path, ops_cache, i);
       Member::Namespace(Namespace { name, body: new_body })
     },
     any => any,

View File

@@ -4,40 +4,35 @@ use super::add_prelude::add_prelude;
 use super::collect_ops::{collect_ops_for, ExportedOpsCache};
 use super::normalize_imports::normalize_imports;
 use super::prefix::prefix;
-use crate::interner::{Interner, Sym};
+use crate::interner::{Interner, Tok};
 use crate::parse;
 use crate::pipeline::error::ProjectError;
 use crate::pipeline::source_loader::LoadedSourceTable;
 use crate::representations::sourcefile::{normalize_namespaces, FileEntry};
 pub fn parse_file(
-  path: Sym,
+  path: &[Tok<String>],
   loaded: &LoadedSourceTable,
   ops_cache: &ExportedOpsCache,
   i: &Interner,
   prelude: &[FileEntry],
 ) -> Result<Vec<FileEntry>, Rc<dyn ProjectError>> {
-  let ld = &loaded[&path];
+  let ld = &loaded[path];
   // let ops_cache = collect_ops::mk_cache(loaded, i);
-  let ops = collect_ops_for(&i.r(path)[..], loaded, ops_cache, i)?;
+  let ops = collect_ops_for(path, loaded, ops_cache, i)?;
   let ops_vec = ops.iter().map(|t| i.r(*t)).cloned().collect::<Vec<_>>();
   let ctx = parse::ParsingContext {
     interner: i,
     ops: &ops_vec,
-    file: Rc::new(i.extern_vec(path)),
+    file: Rc::new(i.extern_all(path)),
   };
   let entries = parse::parse(ld.text.as_str(), ctx)
     .expect("This error should have been caught during loading");
-  let with_prelude = add_prelude(entries, &i.r(path)[..], prelude);
-  let impnormalized = normalize_imports(
-    &ld.preparsed.0,
-    with_prelude,
-    &i.r(path)[..],
-    ops_cache,
-    i,
-  );
+  let with_prelude = add_prelude(entries, path, prelude);
+  let impnormalized =
+    normalize_imports(&ld.preparsed.0, with_prelude, path, ops_cache, i);
   let nsnormalized = normalize_namespaces(Box::new(impnormalized.into_iter()))
     .expect("This error should have been caught during preparsing");
-  let prefixed = prefix(nsnormalized, &i.r(path)[..], ops_cache, i);
+  let prefixed = prefix(nsnormalized, path, ops_cache, i);
   Ok(prefixed)
 }

View File

@@ -1,5 +1,3 @@
-use std::rc::Rc;
 use super::collect_ops::ExportedOpsCache;
 use crate::ast::{Constant, Rule};
 use crate::interner::{Interner, Tok};
@@ -16,14 +14,10 @@ fn member_rec(
   ops_cache: &ExportedOpsCache,
   i: &Interner,
 ) -> Member {
-  // let except = |op| imported.contains(&op);
-  let except = |_| false;
-  let prefix_v = path
-    .iter()
+  let prefix = (path.iter())
     .copied()
     .chain(mod_stack.iter().rev_vec_clone().into_iter())
     .collect::<Vec<_>>();
-  let prefix = i.i(&prefix_v);
   match data {
     Member::Namespace(Namespace { name, body }) => {
       let new_body = entv_rec(mod_stack.push(name), body, path, ops_cache, i);
@@ -31,16 +25,16 @@ fn member_rec(
     },
     Member::Constant(constant) => Member::Constant(Constant {
       name: constant.name,
-      value: constant.value.prefix(prefix, i, &except),
+      value: constant.value.prefix(&prefix, &|_| false),
     }),
     Member::Rule(rule) => Member::Rule(Rule {
       prio: rule.prio,
-      pattern: Rc::new(
-        rule.pattern.iter().map(|e| e.prefix(prefix, i, &except)).collect(),
-      ),
-      template: Rc::new(
-        rule.template.iter().map(|e| e.prefix(prefix, i, &except)).collect(),
-      ),
+      pattern: (rule.pattern.into_iter())
+        .map(|e| e.prefix(&prefix, &|_| false))
+        .collect(),
+      template: (rule.template.into_iter())
+        .map(|e| e.prefix(&prefix, &|_| false))
+        .collect(),
     }),
   }
 }


@@ -1,28 +1,30 @@
use std::ops::Add; use std::ops::Add;
use std::rc::Rc;
use hashbrown::HashMap; use hashbrown::HashMap;
use crate::ast::{Expr, Rule}; use crate::ast::{Expr, Rule};
use crate::interner::{Interner, Sym, Tok}; use crate::interner::{Interner, Tok};
use crate::representations::tree::{ModMember, Module}; use crate::representations::tree::{ModMember, Module};
use crate::representations::NameLike;
use crate::tree::ModEntry;
use crate::utils::Substack; use crate::utils::Substack;
use crate::{Sym, VName};
/// Additional data about a loaded module beyond the list of constants and /// Additional data about a loaded module beyond the list of constants and
/// submodules /// submodules
#[derive(Clone, Debug, Default)] #[derive(Clone, Debug, Default)]
pub struct ProjectExt { pub struct ProjectExt<N: NameLike> {
/// Pairs each foreign token to the module it was imported from /// Pairs each foreign token to the module it was imported from
pub imports_from: HashMap<Tok<String>, Sym>, pub imports_from: HashMap<Tok<String>, N>,
/// Pairs each exported token to its original full name /// Pairs each exported token to its original full name
pub exports: HashMap<Tok<String>, Sym>, pub exports: HashMap<Tok<String>, N>,
/// All rules defined in this module, exported or not /// All rules defined in this module, exported or not
pub rules: Vec<Rule>, pub rules: Vec<Rule<N>>,
/// Filename, if known, for error reporting /// Filename, if known, for error reporting
pub file: Option<Vec<Tok<String>>>, pub file: Option<Vec<Tok<String>>>,
} }
impl Add for ProjectExt { impl<N: NameLike> Add for ProjectExt<N> {
type Output = Self; type Output = Self;
fn add(mut self, rhs: Self) -> Self::Output { fn add(mut self, rhs: Self) -> Self::Output {
@@ -38,33 +40,36 @@ impl Add for ProjectExt {
} }
/// A node in the tree describing the project /// A node in the tree describing the project
pub type ProjectModule = Module<Expr, ProjectExt>; pub type ProjectModule<N> = Module<Expr<N>, ProjectExt<N>>;
/// Module corresponding to the root of a project /// Module corresponding to the root of a project
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ProjectTree(pub Rc<ProjectModule>); pub struct ProjectTree<N: NameLike>(pub ProjectModule<N>);
fn collect_rules_rec(bag: &mut Vec<Rule>, module: &ProjectModule) { fn collect_rules_rec<N: NameLike>(
bag: &mut Vec<Rule<N>>,
module: &ProjectModule<N>,
) {
bag.extend(module.extra.rules.iter().cloned()); bag.extend(module.extra.rules.iter().cloned());
for item in module.items.values() { for item in module.items.values() {
if let ModMember::Sub(module) = &item.member { if let ModMember::Sub(module) = &item.member {
collect_rules_rec(bag, module.as_ref()); collect_rules_rec(bag, module);
} }
} }
} }
/// Collect the complete list of rules to be used by the rule repository from /// Collect the complete list of rules to be used by the rule repository from
/// the [ProjectTree] /// the [ProjectTree]
pub fn collect_rules(project: &ProjectTree) -> Vec<Rule> { pub fn collect_rules<N: NameLike>(project: &ProjectTree<N>) -> Vec<Rule<N>> {
let mut rules = Vec::new(); let mut rules = Vec::new();
collect_rules_rec(&mut rules, project.0.as_ref()); collect_rules_rec(&mut rules, &project.0);
rules rules
} }
fn collect_consts_rec( fn collect_consts_rec<N: NameLike>(
path: Substack<Tok<String>>, path: Substack<Tok<String>>,
bag: &mut HashMap<Sym, Expr>, bag: &mut HashMap<Sym, Expr<N>>,
module: &ProjectModule, module: &ProjectModule<N>,
i: &Interner, i: &Interner,
) { ) {
for (key, entry) in module.items.iter() { for (key, entry) in module.items.iter() {
@@ -81,11 +86,59 @@ fn collect_consts_rec(
} }
/// Extract the symbol table from a [ProjectTree] /// Extract the symbol table from a [ProjectTree]
pub fn collect_consts( pub fn collect_consts<N: NameLike>(
project: &ProjectTree, project: &ProjectTree<N>,
i: &Interner, i: &Interner,
) -> HashMap<Sym, Expr> { ) -> HashMap<Sym, Expr<N>> {
let mut consts = HashMap::new(); let mut consts = HashMap::new();
collect_consts_rec(Substack::Bottom, &mut consts, project.0.as_ref(), i); collect_consts_rec(Substack::Bottom, &mut consts, &project.0, i);
consts consts
} }
fn vname_to_sym_tree_rec(
tree: ProjectModule<VName>,
i: &Interner,
) -> ProjectModule<Sym> {
let process_expr = |ex: Expr<VName>| ex.transform_names(&|n| i.i(&n));
ProjectModule {
imports: tree.imports,
items: (tree.items.into_iter())
.map(|(k, ModEntry { exported, member })| {
(k, ModEntry {
exported,
member: match member {
ModMember::Sub(module) =>
ModMember::Sub(vname_to_sym_tree_rec(module, i)),
ModMember::Item(ex) => ModMember::Item(process_expr(ex)),
},
})
})
.collect(),
extra: ProjectExt {
imports_from: (tree.extra.imports_from.into_iter())
.map(|(k, v)| (k, i.i(&v)))
.collect(),
exports: (tree.extra.exports.into_iter())
.map(|(k, v)| (k, i.i(&v)))
.collect(),
rules: (tree.extra.rules.into_iter())
.map(|Rule { pattern, prio, template }| Rule {
pattern: pattern.into_iter().map(process_expr).collect(),
prio,
template: template.into_iter().map(process_expr).collect(),
})
.collect(),
file: tree.extra.file,
},
}
}
/// Convert a flexible vname-based tree to a more rigid but faster symbol-based
/// tree. The pipeline works with vnames, but the macro executor works with
/// symbols.
pub fn vname_to_sym_tree(
tree: ProjectTree<VName>,
i: &Interner,
) -> ProjectTree<Sym> {
ProjectTree(vname_to_sym_tree_rec(tree.0, i))
}
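A rough usage sketch of this conversion step, assuming a `tree: ProjectTree<VName>` produced by the earlier pipeline stages and an interner `i` in scope (a sketch under those assumptions, not code from this commit):

  // freeze the flexible VName tree into an interned Sym tree for the macro executor
  let sym_tree: ProjectTree<Sym> = vname_to_sym_tree(tree, &i);
  // the Sym-based tree is what the rule machinery consumes
  let rules: Vec<Rule<Sym>> = collect_rules(&sym_tree);
  let consts = collect_consts(&sym_tree, &i); // HashMap<Sym, Expr<Sym>>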


@@ -3,22 +3,22 @@ use std::rc::Rc;
use super::loaded_source::{LoadedSource, LoadedSourceTable}; use super::loaded_source::{LoadedSource, LoadedSourceTable};
use super::preparse::preparse; use super::preparse::preparse;
use crate::interner::{Interner, Sym}; use crate::interner::{Interner, Tok};
use crate::pipeline::error::ProjectError; use crate::pipeline::error::{ProjectError, UnexpectedDirectory};
use crate::pipeline::file_loader::{load_text, IOResult, Loaded}; use crate::pipeline::file_loader::{IOResult, Loaded};
use crate::pipeline::import_abs_path::import_abs_path; use crate::pipeline::import_abs_path::import_abs_path;
use crate::representations::sourcefile::FileEntry; use crate::representations::sourcefile::FileEntry;
use crate::utils::split_max_prefix; use crate::utils::{split_max_prefix, unwrap_or};
/// Load the source at the given path or all within if it's a collection, /// Load the source at the given path or all within if it's a collection,
/// and all sources imported from these. /// and all sources imported from these.
fn load_abs_path_rec( fn load_abs_path_rec(
abs_path: Sym, abs_path: &[Tok<String>],
table: &mut LoadedSourceTable, table: &mut LoadedSourceTable,
prelude: &[FileEntry], prelude: &[FileEntry],
i: &Interner, i: &Interner,
get_source: &impl Fn(Sym) -> IOResult, get_source: &impl Fn(&[Tok<String>]) -> IOResult,
is_injected_module: &impl Fn(Sym) -> bool, is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
) -> Result<(), Rc<dyn ProjectError>> { ) -> Result<(), Rc<dyn ProjectError>> {
// # Termination // # Termination
// //
@@ -29,23 +29,26 @@ fn load_abs_path_rec(
// contains no cycles. // contains no cycles.
// Termination: exit if entry already visited // Termination: exit if entry already visited
if table.contains_key(&abs_path) { if table.contains_key(abs_path) {
return Ok(()); return Ok(());
} }
// try splitting the path to file, swallowing any IO errors // try splitting the path to file, swallowing any IO errors
let is_file = |sym| get_source(sym).map(|l| l.is_code()).unwrap_or(false); let name_split = split_max_prefix(abs_path, &|p| {
let name_split = split_max_prefix(&i.r(abs_path)[..], &|p| is_file(i.i(p))); get_source(p).map(|l| l.is_code()).unwrap_or(false)
});
if let Some((filename, _)) = name_split { if let Some((filename, _)) = name_split {
// if the filename is valid, load, preparse and record this file // if the filename is valid, load, preparse and record this file
let text = load_text(i.i(filename), &get_source, i)?; let text = unwrap_or!(get_source(filename)? => Loaded::Code; {
return Err(UnexpectedDirectory { path: i.extern_all(filename) }.rc())
});
let preparsed = preparse( let preparsed = preparse(
filename.iter().map(|t| i.r(*t)).cloned().collect(), filename.iter().map(|t| i.r(*t)).cloned().collect(),
text.as_str(), text.as_str(),
prelude, prelude,
i, i,
)?; )?;
table.insert(i.i(filename), LoadedSource { table.insert(filename.to_vec(), LoadedSource {
text, text,
preparsed: preparsed.clone(), preparsed: preparsed.clone(),
}); });
@@ -55,7 +58,7 @@ fn load_abs_path_rec(
import_abs_path(filename, modpath, &import.nonglob_path(i), i)?; import_abs_path(filename, modpath, &import.nonglob_path(i), i)?;
// recurse on imported module // recurse on imported module
load_abs_path_rec( load_abs_path_rec(
i.i(&abs_pathv), &abs_pathv,
table, table,
prelude, prelude,
i, i,
@@ -70,20 +73,20 @@ fn load_abs_path_rec(
Ok(Loaded::Code(_)) => Ok(Loaded::Code(_)) =>
unreachable!("split_name returned None but the path is a file"), unreachable!("split_name returned None but the path is a file"),
Err(e) => { Err(e) => {
let parent = i.r(abs_path).split_last().expect("import path nonzero").1; let parent = abs_path.split_last().expect("import path nonzero").1;
// exit without error if it was injected, or raise any IO error that was // exit without error if it was injected, or raise any IO error that was
// previously swallowed // previously swallowed
return if is_injected_module(i.i(parent)) { Ok(()) } else { Err(e) }; return if is_injected_module(parent) { Ok(()) } else { Err(e) };
}, },
}; };
// recurse on all files and folders within // recurse on all files and folders within
for item in coll.iter() { for item in coll.iter() {
let abs_subpath = (i.r(abs_path).iter()) let abs_subpath = (abs_path.iter())
.copied() .copied()
.chain(iter::once(i.i(item))) .chain(iter::once(i.i(item)))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
load_abs_path_rec( load_abs_path_rec(
i.i(&abs_subpath), &abs_subpath,
table, table,
prelude, prelude,
i, i,
@@ -101,17 +104,17 @@ fn load_abs_path_rec(
/// is_injected_module must return false for injected symbols, but may return /// is_injected_module must return false for injected symbols, but may return
/// true for parents of injected modules that are not directly part of the /// true for parents of injected modules that are not directly part of the
/// injected data (the ProjectTree doesn't make a distinction between the two) /// injected data (the ProjectTree doesn't make a distinction between the two)
pub fn load_source( pub fn load_source<'a>(
targets: &[Sym], targets: impl Iterator<Item = &'a [Tok<String>]>,
prelude: &[FileEntry], prelude: &[FileEntry],
i: &Interner, i: &Interner,
get_source: &impl Fn(Sym) -> IOResult, get_source: &impl Fn(&[Tok<String>]) -> IOResult,
is_injected_module: &impl Fn(Sym) -> bool, is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
) -> Result<LoadedSourceTable, Rc<dyn ProjectError>> { ) -> Result<LoadedSourceTable, Rc<dyn ProjectError>> {
let mut table = LoadedSourceTable::new(); let mut table = LoadedSourceTable::new();
for target in targets { for target in targets {
load_abs_path_rec( load_abs_path_rec(
*target, target,
&mut table, &mut table,
prelude, prelude,
i, i,


@@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::rc::Rc; use std::rc::Rc;
use super::preparse::Preparsed; use super::preparse::Preparsed;
use crate::interner::Sym; use crate::representations::VName;
#[derive(Debug)] #[derive(Debug)]
pub struct LoadedSource { pub struct LoadedSource {
@@ -10,4 +10,4 @@ pub struct LoadedSource {
pub preparsed: Preparsed, pub preparsed: Preparsed,
} }
pub type LoadedSourceTable = HashMap<Sym, LoadedSource>; pub type LoadedSourceTable = HashMap<VName, LoadedSource>;


@@ -15,7 +15,7 @@ use crate::representations::sourcefile::{
use crate::representations::tree::{ModEntry, ModMember, Module}; use crate::representations::tree::{ModEntry, ModMember, Module};
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct Preparsed(pub Rc<Module<(), ()>>); pub struct Preparsed(pub Module<(), ()>);
/// Add an internal flat name if it does not exist yet /// Add an internal flat name if it does not exist yet
fn add_intern<K: Eq + Hash>(map: &mut HashMap<K, ModEntry<(), ()>>, k: K) { fn add_intern<K: Eq + Hash>(map: &mut HashMap<K, ModEntry<(), ()>>, k: K) {
@@ -33,22 +33,18 @@ fn add_export<K: Eq + Hash>(map: &mut HashMap<K, ModEntry<(), ()>>, k: K) {
} }
/// Convert source lines into a module /// Convert source lines into a module
fn to_module( fn to_module(src: &[FileEntry], prelude: &[FileEntry]) -> Module<(), ()> {
src: &[FileEntry],
prelude: &[FileEntry],
i: &Interner,
) -> Rc<Module<(), ()>> {
let all_src = || src.iter().chain(prelude.iter()); let all_src = || src.iter().chain(prelude.iter());
let imports = imports(all_src()).cloned().collect::<Vec<_>>(); let imports = imports(all_src()).cloned().collect::<Vec<_>>();
let mut items = all_src() let mut items = all_src()
.filter_map(|ent| match ent { .filter_map(|ent| match ent {
FileEntry::Internal(Member::Namespace(ns)) => { FileEntry::Internal(Member::Namespace(ns)) => {
let member = ModMember::Sub(to_module(&ns.body, prelude, i)); let member = ModMember::Sub(to_module(&ns.body, prelude));
let entry = ModEntry { exported: false, member }; let entry = ModEntry { exported: false, member };
Some((ns.name, entry)) Some((ns.name, entry))
}, },
FileEntry::Exported(Member::Namespace(ns)) => { FileEntry::Exported(Member::Namespace(ns)) => {
let member = ModMember::Sub(to_module(&ns.body, prelude, i)); let member = ModMember::Sub(to_module(&ns.body, prelude));
let entry = ModEntry { exported: true, member }; let entry = ModEntry { exported: true, member };
Some((ns.name, entry)) Some((ns.name, entry))
}, },
@@ -70,20 +66,20 @@ fn to_module(
FileEntry::Exported(Member::Constant(Constant { name, .. })) => FileEntry::Exported(Member::Constant(Constant { name, .. })) =>
add_export(&mut items, *name), add_export(&mut items, *name),
FileEntry::Internal(Member::Rule(rule)) => { FileEntry::Internal(Member::Rule(rule)) => {
let names = rule.collect_single_names(i); let names = rule.collect_single_names();
for name in names { for name in names {
add_intern(&mut items, name) add_intern(&mut items, name)
} }
}, },
FileEntry::Exported(Member::Rule(rule)) => { FileEntry::Exported(Member::Rule(rule)) => {
let names = rule.collect_single_names(i); let names = rule.collect_single_names();
for name in names { for name in names {
add_export(&mut items, name) add_export(&mut items, name)
} }
}, },
} }
} }
Rc::new(Module { imports, items, extra: () }) Module { imports, items, extra: () }
} }
/// Preparse the module. At this stage, only the imports and /// Preparse the module. At this stage, only the imports and
@@ -112,5 +108,5 @@ pub fn preparse(
} }
.rc() .rc()
})?; })?;
Ok(Preparsed(to_module(&normalized, prelude, i))) Ok(Preparsed(to_module(&normalized, prelude)))
} }


@@ -1,9 +1,7 @@
//! Datastructures representing syntax as written //! Datastructures representing the units of macro execution
//! //!
//! These structures are produced by the parser, processed by the macro //! These structures are produced by the pipeline, processed by the macro
//! executor, and then converted to other usable formats. This module is public //! executor, and then converted to other usable formats.
//! in order to allow users to define injected libraries programmatically,
//! although going through the parser is usually preferable.
use std::hash::Hash; use std::hash::Hash;
use std::rc::Rc; use std::rc::Rc;
@@ -12,55 +10,62 @@ use itertools::Itertools;
use ordered_float::NotNan; use ordered_float::NotNan;
use super::location::Location; use super::location::Location;
use super::namelike::{NameLike, VName};
use super::primitive::Primitive; use super::primitive::Primitive;
use crate::interner::{InternedDisplay, Interner, Sym, Tok}; use crate::interner::{InternedDisplay, Interner, Tok};
use crate::utils::Substack; use crate::utils::{map_rc, Substack};
/// A [Clause] with associated metadata /// A [Clause] with associated metadata
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub struct Expr { pub struct Expr<N: NameLike> {
/// The actual value /// The actual value
pub value: Clause, pub value: Clause<N>,
/// Information about the code that produced this value /// Information about the code that produced this value
pub location: Location, pub location: Location,
} }
impl Expr { impl<N: NameLike> Expr<N> {
/// Obtain the contained clause /// Obtain the contained clause
pub fn into_clause(self) -> Clause { pub fn into_clause(self) -> Clause<N> {
self.value self.value
} }
/// Call the function on every name in this expression /// Call the function on every name in this expression
pub fn visit_names(&self, binds: Substack<Sym>, cb: &mut impl FnMut(Sym)) { pub fn visit_names(&self, binds: Substack<&N>, cb: &mut impl FnMut(&N)) {
let Expr { value, .. } = self; let Expr { value, .. } = self;
value.visit_names(binds, cb); value.visit_names(binds, cb);
} }
/// Process all names with the given mapper. /// Process all names with the given mapper.
/// Return a new object if anything was processed /// Return a new object if anything was processed
pub fn map_names(&self, pred: &impl Fn(Sym) -> Option<Sym>) -> Option<Self> { pub fn map_names(&self, pred: &impl Fn(&N) -> Option<N>) -> Option<Self> {
Some(Self { Some(Self {
value: self.value.map_names(pred)?, value: self.value.map_names(pred)?,
location: self.location.clone(), location: self.location.clone(),
}) })
} }
/// Transform from one name system to another
pub fn transform_names<O: NameLike>(self, pred: &impl Fn(N) -> O) -> Expr<O> {
Expr { value: self.value.transform_names(pred), location: self.location }
}
}
impl Expr<VName> {
/// Add the specified prefix to every Name /// Add the specified prefix to every Name
pub fn prefix( pub fn prefix(
&self, &self,
prefix: Sym, prefix: &[Tok<String>],
i: &Interner,
except: &impl Fn(Tok<String>) -> bool, except: &impl Fn(Tok<String>) -> bool,
) -> Self { ) -> Self {
Self { Self {
value: self.value.prefix(prefix, i, except), value: self.value.prefix(prefix, except),
location: self.location.clone(), location: self.location.clone(),
} }
} }
} }
impl InternedDisplay for Expr { impl<N: NameLike> InternedDisplay for Expr<N> {
fn fmt_i( fn fmt_i(
&self, &self,
f: &mut std::fmt::Formatter<'_>, f: &mut std::fmt::Formatter<'_>,
@@ -116,23 +121,23 @@ impl InternedDisplay for Placeholder {
/// An S-expression as read from a source file /// An S-expression as read from a source file
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum Clause { pub enum Clause<N: NameLike> {
/// A primitive /// A primitive
P(Primitive), P(Primitive),
/// A c-style name or an operator, eg. `+`, `i`, `foo::bar` /// A c-style name or an operator, eg. `+`, `i`, `foo::bar`
Name(Sym), Name(N),
/// A parenthesized exmrc_empty_slice()pression /// A parenthesized expression
/// eg. `(print out "hello")`, `[1, 2, 3]`, `{Some(t) => t}` /// eg. `(print out "hello")`, `[1, 2, 3]`, `{Some(t) => t}`
S(char, Rc<Vec<Expr>>), S(char, Rc<Vec<Expr<N>>>),
/// A function expression, eg. `\x. x + 1` /// A function expression, eg. `\x. x + 1`
Lambda(Rc<Expr>, Rc<Vec<Expr>>), Lambda(Rc<Expr<N>>, Rc<Vec<Expr<N>>>),
/// A placeholder for macros, eg. `$name`, `...$body`, `...$lhs:1` /// A placeholder for macros, eg. `$name`, `...$body`, `...$lhs:1`
Placeh(Placeholder), Placeh(Placeholder),
} }
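As a hedged illustration of these variants, this is roughly how the expression `\x. x` could be assembled as a `Clause<VName>` (an interner `i`, `std::rc::Rc` and locations are assumed; this is a sketch, not code from the commit):

  let x: VName = vec![i.i("x")];
  let arg = Clause::Name(x.clone()).into_expr();   // the bound parameter
  let body = Clause::Name(x).into_expr();          // the lambda body, a single expression
  let lambda: Clause<VName> = Clause::Lambda(Rc::new(arg), Rc::new(vec![body]));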
impl Clause { impl<N: NameLike> Clause<N> {
/// Extract the expressions from an auto, lambda or S /// Extract the expressions from an auto, lambda or S
pub fn body(&self) -> Option<Rc<Vec<Expr>>> { pub fn body(&self) -> Option<Rc<Vec<Expr<N>>>> {
match self { match self {
Self::Lambda(_, body) | Self::S(_, body) => Some(body.clone()), Self::Lambda(_, body) | Self::S(_, body) => Some(body.clone()),
_ => None, _ => None,
@@ -140,7 +145,7 @@ impl Clause {
} }
/// Convert with identical meaning /// Convert with identical meaning
pub fn into_expr(self) -> Expr { pub fn into_expr(self) -> Expr<N> {
if let Self::S('(', body) = &self { if let Self::S('(', body) = &self {
if body.len() == 1 { if body.len() == 1 {
body[0].clone() body[0].clone()
@@ -153,7 +158,7 @@ impl Clause {
} }
/// Convert with identical meaning /// Convert with identical meaning
pub fn from_exprs(exprs: &[Expr]) -> Option<Clause> { pub fn from_exprs(exprs: &[Expr<N>]) -> Option<Self> {
if exprs.is_empty() { if exprs.is_empty() {
None None
} else if exprs.len() == 1 { } else if exprs.len() == 1 {
@@ -163,7 +168,7 @@ impl Clause {
} }
} }
/// Convert with identical meaning /// Convert with identical meaning
pub fn from_exprv(exprv: &Rc<Vec<Expr>>) -> Option<Clause> { pub fn from_exprv(exprv: &Rc<Vec<Expr<N>>>) -> Option<Clause<N>> {
if exprv.len() < 2 { if exprv.len() < 2 {
Self::from_exprs(exprv) Self::from_exprs(exprv)
} else { } else {
@@ -175,12 +180,12 @@ impl Clause {
/// It also finds a lot of things that aren't names, such as all /// It also finds a lot of things that aren't names, such as all
/// bound parameters. Generally speaking, this is not a very /// bound parameters. Generally speaking, this is not a very
/// sophisticated search. /// sophisticated search.
pub fn visit_names(&self, binds: Substack<Sym>, cb: &mut impl FnMut(Sym)) { pub fn visit_names(&self, binds: Substack<&N>, cb: &mut impl FnMut(&N)) {
match self { match self {
Clause::Lambda(arg, body) => { Clause::Lambda(arg, body) => {
arg.visit_names(binds, cb); arg.visit_names(binds, cb);
let new_binds = let new_binds =
if let Clause::Name(n) = arg.value { binds.push(n) } else { binds }; if let Clause::Name(n) = &arg.value { binds.push(n) } else { binds };
for x in body.iter() { for x in body.iter() {
x.visit_names(new_binds, cb) x.visit_names(new_binds, cb)
} }
@@ -190,8 +195,8 @@ impl Clause {
x.visit_names(binds, cb) x.visit_names(binds, cb)
}, },
Clause::Name(name) => Clause::Name(name) =>
if binds.iter().all(|x| x != name) { if binds.iter().all(|x| x != &name) {
cb(*name) cb(name)
}, },
_ => (), _ => (),
} }
@@ -199,10 +204,10 @@ impl Clause {
/// Process all names with the given mapper. /// Process all names with the given mapper.
/// Return a new object if anything was processed /// Return a new object if anything was processed
pub fn map_names(&self, pred: &impl Fn(Sym) -> Option<Sym>) -> Option<Self> { pub fn map_names(&self, pred: &impl Fn(&N) -> Option<N>) -> Option<Self> {
match self { match self {
Clause::P(_) | Clause::Placeh(_) => None, Clause::P(_) | Clause::Placeh(_) => None,
Clause::Name(name) => pred(*name).map(Clause::Name), Clause::Name(name) => pred(name).map(Clause::Name),
Clause::S(c, body) => { Clause::S(c, body) => {
let mut any_some = false; let mut any_some = false;
let new_body = body let new_body = body
@@ -238,29 +243,49 @@ impl Clause {
} }
} }
/// Transform from one name representation to another
pub fn transform_names<O: NameLike>(
self,
pred: &impl Fn(N) -> O,
) -> Clause<O> {
match self {
Self::Name(n) => Clause::Name(pred(n)),
Self::Placeh(p) => Clause::Placeh(p),
Self::P(p) => Clause::P(p),
Self::Lambda(n, b) => Clause::Lambda(
map_rc(n, |n| n.transform_names(pred)),
map_rc(b, |b| b.into_iter().map(|e| e.transform_names(pred)).collect()),
),
Self::S(c, b) => Clause::S(
c,
map_rc(b, |b| b.into_iter().map(|e| e.transform_names(pred)).collect()),
),
}
}
}
impl Clause<VName> {
/// Add the specified prefix to every Name /// Add the specified prefix to every Name
pub fn prefix( pub fn prefix(
&self, &self,
prefix: Sym, prefix: &[Tok<String>],
i: &Interner,
except: &impl Fn(Tok<String>) -> bool, except: &impl Fn(Tok<String>) -> bool,
) -> Self { ) -> Self {
self self
.map_names(&|name| { .map_names(&|name| {
let old = i.r(name); if except(name[0]) {
if except(old[0]) {
return None; return None;
} }
let mut new = i.r(prefix).clone(); let mut new = prefix.to_vec();
new.extend_from_slice(old); new.extend_from_slice(name);
Some(i.i(&new)) Some(new)
}) })
.unwrap_or_else(|| self.clone()) .unwrap_or_else(|| self.clone())
} }
} }
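A minimal sketch of what `prefix` does in the VName form, read off the mapper above (an interner `i` is assumed): prefixing `bar` with `foo` yields `foo::bar`, while names whose first segment is accepted by `except` are left untouched.

  let name: Clause<VName> = Clause::Name(vec![i.i("bar")]);
  let prefixed = name.prefix(&[i.i("foo")], &|_| false);
  // prefixed == Clause::Name(vec![i.i("foo"), i.i("bar")]), i.e. foo::bar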
fn fmt_expr_seq<'a>( fn fmt_expr_seq<'a, N: NameLike>(
it: &mut impl Iterator<Item = &'a Expr>, it: &mut impl Iterator<Item = &'a Expr<N>>,
f: &mut std::fmt::Formatter<'_>, f: &mut std::fmt::Formatter<'_>,
i: &Interner, i: &Interner,
) -> std::fmt::Result { ) -> std::fmt::Result {
@@ -273,19 +298,7 @@ fn fmt_expr_seq<'a>(
Ok(()) Ok(())
} }
pub(crate) fn fmt_name( impl<N: NameLike> InternedDisplay for Clause<N> {
name: Sym,
f: &mut std::fmt::Formatter,
i: &Interner,
) -> std::fmt::Result {
let strings = i.r(name).iter().map(|t| i.r(*t).as_str());
for el in itertools::intersperse(strings, "::") {
write!(f, "{}", el)?
}
Ok(())
}
impl InternedDisplay for Clause {
fn fmt_i( fn fmt_i(
&self, &self,
f: &mut std::fmt::Formatter<'_>, f: &mut std::fmt::Formatter<'_>,
@@ -293,7 +306,7 @@ impl InternedDisplay for Clause {
) -> std::fmt::Result { ) -> std::fmt::Result {
match self { match self {
Self::P(p) => write!(f, "{:?}", p), Self::P(p) => write!(f, "{:?}", p),
Self::Name(name) => fmt_name(*name, f, i), Self::Name(name) => write!(f, "{}", name.to_strv(i).join("::")),
Self::S(del, items) => { Self::S(del, items) => {
f.write_str(&del.to_string())?; f.write_str(&del.to_string())?;
fmt_expr_seq(&mut items.iter(), f, i)?; fmt_expr_seq(&mut items.iter(), f, i)?;
@@ -317,54 +330,47 @@ impl InternedDisplay for Clause {
/// A substitution rule as read from the source /// A substitution rule as read from the source
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub struct Rule { pub struct Rule<N: NameLike> {
/// Tree fragment in the source code that activates this rule /// Tree fragment in the source code that activates this rule
pub pattern: Rc<Vec<Expr>>, pub pattern: Vec<Expr<N>>,
/// Influences the order in which rules are checked /// Influences the order in which rules are checked
pub prio: NotNan<f64>, pub prio: NotNan<f64>,
/// Tree fragment generated by this rule /// Tree fragment generated by this rule
pub template: Rc<Vec<Expr>>, pub template: Vec<Expr<N>>,
}
impl Rule {
/// Return a list of all names that don't contain a namespace separator `::`.
/// These are exported when the rule is exported
pub fn collect_single_names(&self, i: &Interner) -> Vec<Tok<String>> {
let mut names = Vec::new();
for e in self.pattern.iter() {
e.visit_names(Substack::Bottom, &mut |tok| {
let ns_name = i.r(tok);
let (name, excess) =
ns_name.split_first().expect("Namespaced name must not be empty");
if !excess.is_empty() {
return;
}
names.push(*name)
});
}
names
} }
impl Rule<VName> {
/// Namespace all tokens in the rule /// Namespace all tokens in the rule
pub fn prefix( pub fn prefix(
&self, &self,
prefix: Sym, prefix: &[Tok<String>],
i: &Interner,
except: &impl Fn(Tok<String>) -> bool, except: &impl Fn(Tok<String>) -> bool,
) -> Self { ) -> Self {
Self { Self {
prio: self.prio, prio: self.prio,
pattern: Rc::new( pattern: self.pattern.iter().map(|e| e.prefix(prefix, except)).collect(),
self.pattern.iter().map(|e| e.prefix(prefix, i, except)).collect(), template: (self.template.iter())
), .map(|e| e.prefix(prefix, except))
template: Rc::new( .collect(),
self.template.iter().map(|e| e.prefix(prefix, i, except)).collect(),
),
}
} }
} }
impl InternedDisplay for Rule { /// Return a list of all names that don't contain a namespace separator `::`.
/// These are exported when the rule is exported
pub fn collect_single_names(&self) -> Vec<Tok<String>> {
let mut names = Vec::new();
for e in self.pattern.iter() {
e.visit_names(Substack::Bottom, &mut |ns_name| {
if ns_name.len() == 1 {
names.push(ns_name[0])
}
});
}
names
}
}
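A worked example of this check (assumed from the single-segment test above): a pattern that mentions both `map` and `std::list::cons` yields only `map`,

  // visit_names reports both names; only `map` has exactly one segment,
  // so collect_single_names returns [map]

because the namespaced name contains the `::` separator and is therefore not re-exported when the rule is exported.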
impl<N: NameLike> InternedDisplay for Rule<N> {
fn fmt_i( fn fmt_i(
&self, &self,
f: &mut std::fmt::Formatter<'_>, f: &mut std::fmt::Formatter<'_>,
@@ -389,7 +395,7 @@ pub struct Constant {
/// Used to reference the constant /// Used to reference the constant
pub name: Tok<String>, pub name: Tok<String>,
/// The constant value inserted where the name is found /// The constant value inserted where the name is found
pub value: Expr, pub value: Expr<VName>,
} }
impl InternedDisplay for Constant { impl InternedDisplay for Constant {


@@ -1,4 +1,5 @@
use super::{ast, ast_to_postmacro, interpreted, postmacro_to_interpreted}; use super::{ast, ast_to_postmacro, interpreted, postmacro_to_interpreted};
use crate::Sym;
#[allow(unused)] #[allow(unused)]
pub type AstError = ast_to_postmacro::Error; pub type AstError = ast_to_postmacro::Error;
@@ -6,7 +7,7 @@ pub type AstError = ast_to_postmacro::Error;
/// Attempt to convert the AST processed by macros into an executable format /// Attempt to convert the AST processed by macros into an executable format
#[allow(unused)] #[allow(unused)]
pub fn ast_to_interpreted( pub fn ast_to_interpreted(
ast: &ast::Expr, ast: &ast::Expr<Sym>,
) -> Result<interpreted::ExprInst, AstError> { ) -> Result<interpreted::ExprInst, AstError> {
let pmtree = ast_to_postmacro::expr(ast)?; let pmtree = ast_to_postmacro::expr(ast)?;
Ok(postmacro_to_interpreted::expr(&pmtree)) Ok(postmacro_to_interpreted::expr(&pmtree))


@@ -3,8 +3,8 @@ use std::rc::Rc;
use super::location::Location; use super::location::Location;
use super::{ast, postmacro}; use super::{ast, postmacro};
use crate::interner::Sym;
use crate::utils::Substack; use crate::utils::Substack;
use crate::Sym;
#[derive(Clone)] #[derive(Clone)]
pub enum Error { pub enum Error {
@@ -42,7 +42,7 @@ impl Display for Error {
} }
/// Try to convert an expression from AST format to typed lambda /// Try to convert an expression from AST format to typed lambda
pub fn expr(expr: &ast::Expr) -> Result<postmacro::Expr, Error> { pub fn expr(expr: &ast::Expr<Sym>) -> Result<postmacro::Expr, Error> {
expr_rec(expr, Context::new()) expr_rec(expr, Context::new())
} }
@@ -66,7 +66,7 @@ impl<'a> Context<'a> {
/// Process an expression sequence /// Process an expression sequence
fn exprv_rec<'a>( fn exprv_rec<'a>(
v: &'a [ast::Expr], v: &'a [ast::Expr<Sym>],
ctx: Context<'a>, ctx: Context<'a>,
) -> Result<postmacro::Expr, Error> { ) -> Result<postmacro::Expr, Error> {
let (last, rest) = v.split_last().ok_or(Error::EmptyS)?; let (last, rest) = v.split_last().ok_or(Error::EmptyS)?;
@@ -81,7 +81,7 @@ fn exprv_rec<'a>(
/// Process an expression /// Process an expression
fn expr_rec<'a>( fn expr_rec<'a>(
ast::Expr { value, location }: &'a ast::Expr, ast::Expr { value, location }: &'a ast::Expr<Sym>,
ctx: Context<'a>, ctx: Context<'a>,
) -> Result<postmacro::Expr, Error> { ) -> Result<postmacro::Expr, Error> {
if let ast::Clause::S(paren, body) = value { if let ast::Clause::S(paren, body) = value {
@@ -98,7 +98,7 @@ fn expr_rec<'a>(
/// Process a clause /// Process a clause
fn clause_rec<'a>( fn clause_rec<'a>(
cls: &'a ast::Clause, cls: &'a ast::Clause<Sym>,
ctx: Context<'a>, ctx: Context<'a>,
) -> Result<postmacro::Clause, Error> { ) -> Result<postmacro::Clause, Error> {
match cls { match cls {


@@ -11,8 +11,9 @@ use super::location::Location;
use super::path_set::PathSet; use super::path_set::PathSet;
use super::primitive::Primitive; use super::primitive::Primitive;
use super::Literal; use super::Literal;
use crate::interner::{InternedDisplay, Sym}; use crate::interner::InternedDisplay;
use crate::utils::sym2string; use crate::utils::sym2string;
use crate::Sym;
// TODO: implement Debug, Eq and Hash with cycle detection // TODO: implement Debug, Eq and Hash with cycle detection


@@ -4,6 +4,7 @@ pub mod ast_to_postmacro;
pub mod interpreted; pub mod interpreted;
pub mod literal; pub mod literal;
pub mod location; pub mod location;
mod namelike;
pub mod path_set; pub mod path_set;
pub mod postmacro; pub mod postmacro;
pub mod postmacro_to_interpreted; pub mod postmacro_to_interpreted;
@@ -13,5 +14,6 @@ pub mod tree;
pub use literal::Literal; pub use literal::Literal;
pub use location::Location; pub use location::Location;
pub use namelike::{NameLike, Sym, VName};
pub use path_set::PathSet; pub use path_set::PathSet;
pub use primitive::Primitive; pub use primitive::Primitive;


@@ -0,0 +1,34 @@
use crate::interner::{Interner, Tok};
/// A mutable representation of a namespaced identifier.
///
/// These names may be relative or otherwise partially processed.
///
/// See also [Sym]
pub type VName = Vec<Tok<String>>;
/// An interned representation of a namespaced identifier.
///
/// These names are always absolute.
///
/// See also [VName]
pub type Sym = Tok<Vec<Tok<String>>>;
/// An abstraction over tokenized vs non-tokenized names so that they can be
/// handled together in datastructures
pub trait NameLike: 'static + Clone + Eq {
/// Fully resolve the name for printing
fn to_strv(&self, i: &Interner) -> Vec<String>;
}
impl NameLike for Sym {
fn to_strv(&self, i: &Interner) -> Vec<String> {
i.extern_vec(*self)
}
}
impl NameLike for VName {
fn to_strv(&self, i: &Interner) -> Vec<String> {
i.extern_all(self)
}
}
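A small sketch contrasting the two forms, assuming `Interner::i` interns both strings and token vectors as it is used elsewhere in this commit:

  let vname: VName = vec![i.i("std"), i.i("num"), i.i("add")]; // may be relative, freely edited
  let sym: Sym = i.i(&vname);                                  // interned handle, always absolute
  assert_eq!(sym.to_strv(&i), vname.to_strv(&i));              // both resolve to ["std", "num", "add"]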


@@ -2,7 +2,7 @@ use std::fmt::Debug;
use std::ops::Add; use std::ops::Add;
use std::rc::Rc; use std::rc::Rc;
use crate::utils::Side; use crate::utils::{rc_to_owned, Side};
/// A branching path selecting some placeholders (but at least one) in a Lambda /// A branching path selecting some placeholders (but at least one) in a Lambda
/// expression /// expression
@@ -41,8 +41,7 @@ impl Add<Side> for PathSet {
type Output = Self; type Output = Self;
fn add(self, rhs: Side) -> Self::Output { fn add(self, rhs: Side) -> Self::Output {
let PathSet { steps, next } = self; let PathSet { steps, next } = self;
let mut new_steps = let mut new_steps = rc_to_owned(steps);
Rc::try_unwrap(steps).unwrap_or_else(|rc| rc.as_ref().clone());
new_steps.insert(0, rhs); new_steps.insert(0, rhs);
Self { steps: Rc::new(new_steps), next } Self { steps: Rc::new(new_steps), next }
} }


@@ -3,8 +3,8 @@ use std::rc::Rc;
use super::location::Location; use super::location::Location;
use super::primitive::Primitive; use super::primitive::Primitive;
use crate::interner::Sym;
use crate::utils::string_from_charset; use crate::utils::string_from_charset;
use crate::Sym;
/// Indicates whether either side needs to be wrapped. Syntax whose end is /// Indicates whether either side needs to be wrapped. Syntax whose end is
/// ambiguous on that side must use parentheses, or forward the flag /// ambiguous on that side must use parentheses, or forward the flag


@@ -3,9 +3,11 @@ use std::iter;
use itertools::{Either, Itertools}; use itertools::{Either, Itertools};
use super::namelike::VName;
use crate::ast::{Constant, Rule}; use crate::ast::{Constant, Rule};
use crate::interner::{Interner, Sym, Tok}; use crate::interner::{Interner, Tok};
use crate::utils::{unwrap_or, BoxedIter}; use crate::utils::{unwrap_or, BoxedIter};
use crate::Sym;
/// An import pointing at another module, either specifying the symbol to be /// An import pointing at another module, either specifying the symbol to be
/// imported or importing all available symbols with a globstar (*) /// imported or importing all available symbols with a globstar (*)
@@ -50,7 +52,7 @@ pub struct Namespace {
pub enum Member { pub enum Member {
/// A substitution rule. Rules apply even when they're not in scope, if the /// A substitution rule. Rules apply even when they're not in scope, if the
/// absolute names are present eg. because they're produced by other rules /// absolute names are present eg. because they're produced by other rules
Rule(Rule), Rule(Rule<VName>),
/// A constant (or function) associated with a name /// A constant (or function) associated with a name
Constant(Constant), Constant(Constant),
/// A prefixed set of other entries /// A prefixed set of other entries


@@ -2,8 +2,8 @@
//! //!
//! Used by various stages of the pipeline with different parameters //! Used by various stages of the pipeline with different parameters
use std::ops::Add; use std::ops::Add;
use std::rc::Rc;
use duplicate::duplicate_item;
use hashbrown::HashMap; use hashbrown::HashMap;
use super::sourcefile::Import; use super::sourcefile::Import;
@@ -16,7 +16,7 @@ pub enum ModMember<TItem: Clone, TExt: Clone> {
/// Arbitrary data /// Arbitrary data
Item(TItem), Item(TItem),
/// A child module /// A child module
Sub(Rc<Module<TItem, TExt>>), Sub(Module<TItem, TExt>),
} }
/// Data about a name in a [Module] /// Data about a name in a [Module]
@@ -61,18 +61,25 @@ pub struct WalkError {
pub type ModPath<'a> = Substack<'a, Tok<String>>; pub type ModPath<'a> = Substack<'a, Tok<String>>;
impl<TItem: Clone, TExt: Clone> Module<TItem, TExt> { impl<TItem: Clone, TExt: Clone> Module<TItem, TExt> {
/// Return the module at the end of the given path. /// Return the module at the end of the given path
pub fn walk( #[allow(clippy::needless_arbitrary_self_type)] // duplicate
self: &Rc<Self>, #[duplicate_item(
method reference(type) dereference(expr) map_method;
[walk] [type] [expr] [remove];
[walk_ref] [&type] [*expr] [get];
[walk_mut] [&mut type] [*expr] [get_mut];
)]
pub fn method(
self: reference([Self]),
path: &[Tok<String>], path: &[Tok<String>],
require_exported: bool, require_exported: bool,
) -> Result<Rc<Self>, WalkError> { ) -> Result<reference([Self]), WalkError> {
let mut cur = self; let mut cur = self;
for (pos, step) in path.iter().enumerate() { for (pos, step) in path.iter().enumerate() {
if let Some(ModEntry { member: ModMember::Sub(next), exported }) = if let Some(ModEntry { member: ModMember::Sub(next), exported }) =
cur.items.get(step) cur.items.map_method(step)
{ {
if require_exported && !exported { if require_exported && !dereference([exported]) {
return Err(WalkError { pos, kind: WalkErrorKind::Private }); return Err(WalkError { pos, kind: WalkErrorKind::Private });
} }
cur = next cur = next
@@ -80,7 +87,7 @@ impl<TItem: Clone, TExt: Clone> Module<TItem, TExt> {
return Err(WalkError { pos, kind: WalkErrorKind::Missing }); return Err(WalkError { pos, kind: WalkErrorKind::Missing });
} }
} }
Ok(cur.clone()) Ok(cur)
} }
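The `duplicate_item` attribute used above comes from the `duplicate` crate added in this commit; it stamps out one copy of the item per substitution row. A minimal, self-contained illustration of the same mechanism (the names here are invented for the example):

  use duplicate::duplicate_item;

  #[duplicate_item(
    fn_name     int_type;
    [ max_u8 ]  [ u8 ];
    [ max_u16 ] [ u16 ];
  )]
  fn fn_name() -> int_type {
    int_type::MAX
  }

  // expands to two functions: max_u8() -> u8 and max_u16() -> u16

In the module walker this yields `walk`, `walk_ref` and `walk_mut` from a single body, substituting the receiver type, the dereference expression and the HashMap accessor per row.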
fn visit_all_imports_rec<E>( fn visit_all_imports_rec<E>(
@@ -118,21 +125,16 @@ impl<TItem: Clone, TExt: Clone> Module<TItem, TExt> {
let mut new_items = HashMap::new(); let mut new_items = HashMap::new();
for (key, right) in items { for (key, right) in items {
// if both contain a submodule // if both contain a submodule
if let Some(left) = self.items.remove(&key) { match (self.items.remove(&key), right) {
if let ModMember::Sub(rsub) = &right.member { (
if let ModMember::Sub(lsub) = &left.member { Some(ModEntry { member: ModMember::Sub(lsub), .. }),
// merge them with rhs exportedness ModEntry { member: ModMember::Sub(rsub), exported },
let new_mod = lsub.as_ref().clone().overlay(rsub.as_ref().clone()); ) => new_items.insert(key, ModEntry {
new_items.insert(key, ModEntry { exported,
exported: right.exported, member: ModMember::Sub(lsub.overlay(rsub)),
member: ModMember::Sub(Rc::new(new_mod)), }),
}); (_, right) => new_items.insert(key, right),
continue; };
}
}
}
// otherwise right shadows left
new_items.insert(key, right);
} }
new_items.extend(self.items.into_iter()); new_items.extend(self.items.into_iter());
self.imports.extend(imports.into_iter()); self.imports.extend(imports.into_iter());


@@ -2,12 +2,15 @@ use std::rc::Rc;
use super::state::State; use super::state::State;
use crate::ast::Expr; use crate::ast::Expr;
use crate::Sym;
pub type RuleExpr = Expr<Sym>;
/// Cacheable optimized structures for matching patterns on slices. This is /// Cacheable optimized structures for matching patterns on slices. This is
/// injected to allow experimentation in the matcher implementation. /// injected to allow experimentation in the matcher implementation.
pub trait Matcher { pub trait Matcher {
/// Build matcher for a pattern /// Build matcher for a pattern
fn new(pattern: Rc<Vec<Expr>>) -> Self; fn new(pattern: Rc<Vec<RuleExpr>>) -> Self;
/// Apply matcher to a token sequence /// Apply matcher to a token sequence
fn apply<'a>(&self, source: &'a [Expr]) -> Option<State<'a>>; fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>>;
} }
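A rough usage sketch of the trait, with `VectreeMatcher` (defined later in this commit) standing in for any implementation; `Rc`, a `pattern: Vec<RuleExpr>` and a `source: &[RuleExpr]` are assumed to be in scope:

  let matcher = VectreeMatcher::new(Rc::new(pattern));
  if let Some(state) = matcher.apply(source) {
    // `state` maps each placeholder name to the matched expression(s)
  }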


@@ -1,12 +1,12 @@
use super::scal_match::scalv_match; use super::scal_match::scalv_match;
use super::shared::AnyMatcher; use super::shared::AnyMatcher;
use super::vec_match::vec_match; use super::vec_match::vec_match;
use crate::ast::Expr; use crate::rule::matcher::RuleExpr;
use crate::rule::state::State; use crate::rule::state::State;
pub fn any_match<'a>( pub fn any_match<'a>(
matcher: &AnyMatcher, matcher: &AnyMatcher,
seq: &'a [Expr], seq: &'a [RuleExpr],
) -> Option<State<'a>> { ) -> Option<State<'a>> {
match matcher { match matcher {
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq), AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq),


@@ -1,16 +1,18 @@
use itertools::Itertools; use itertools::Itertools;
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher}; use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
use crate::ast::{Clause, Expr, PHClass, Placeholder}; use crate::ast::{Clause, PHClass, Placeholder};
use crate::interner::Tok; use crate::interner::Tok;
use crate::rule::matcher::RuleExpr;
use crate::rule::vec_attrs::vec_attrs; use crate::rule::vec_attrs::vec_attrs;
use crate::utils::Side; use crate::utils::Side;
pub type MaxVecSplit<'a> = (&'a [Expr], (Tok<String>, u64, bool), &'a [Expr]); pub type MaxVecSplit<'a> =
(&'a [RuleExpr], (Tok<String>, u64, bool), &'a [RuleExpr]);
/// Derive the details of the central vectorial and the two sides from a /// Derive the details of the central vectorial and the two sides from a
/// slice of Expr's /// slice of Expr's
fn split_at_max_vec(pattern: &[Expr]) -> Option<MaxVecSplit> { fn split_at_max_vec(pattern: &[RuleExpr]) -> Option<MaxVecSplit> {
let rngidx = pattern.iter().position_max_by_key(|expr| { let rngidx = pattern.iter().position_max_by_key(|expr| {
vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1) vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1)
})?; })?;
@@ -21,11 +23,11 @@ fn split_at_max_vec(pattern: &[Expr]) -> Option<MaxVecSplit> {
vec_attrs(placeh).map(|attrs| (left, attrs, right)) vec_attrs(placeh).map(|attrs| (left, attrs, right))
} }
fn scal_cnt<'a>(iter: impl Iterator<Item = &'a Expr>) -> usize { fn scal_cnt<'a>(iter: impl Iterator<Item = &'a RuleExpr>) -> usize {
iter.take_while(|expr| vec_attrs(expr).is_none()).count() iter.take_while(|expr| vec_attrs(expr).is_none()).count()
} }
pub fn mk_any(pattern: &[Expr]) -> AnyMatcher { pub fn mk_any(pattern: &[RuleExpr]) -> AnyMatcher {
let left_split = scal_cnt(pattern.iter()); let left_split = scal_cnt(pattern.iter());
if pattern.len() <= left_split { if pattern.len() <= left_split {
return AnyMatcher::Scalar(mk_scalv(pattern)); return AnyMatcher::Scalar(mk_scalv(pattern));
@@ -41,12 +43,12 @@ pub fn mk_any(pattern: &[Expr]) -> AnyMatcher {
} }
/// Pattern MUST NOT contain vectorial placeholders /// Pattern MUST NOT contain vectorial placeholders
fn mk_scalv(pattern: &[Expr]) -> Vec<ScalMatcher> { fn mk_scalv(pattern: &[RuleExpr]) -> Vec<ScalMatcher> {
pattern.iter().map(mk_scalar).collect() pattern.iter().map(mk_scalar).collect()
} }
/// Pattern MUST start and end with a vectorial placeholder /// Pattern MUST start and end with a vectorial placeholder
fn mk_vec(pattern: &[Expr]) -> VecMatcher { fn mk_vec(pattern: &[RuleExpr]) -> VecMatcher {
debug_assert!(!pattern.is_empty(), "pattern cannot be empty"); debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
debug_assert!( debug_assert!(
pattern.first().map(vec_attrs).is_some(), pattern.first().map(vec_attrs).is_some(),
@@ -97,7 +99,7 @@ fn mk_vec(pattern: &[Expr]) -> VecMatcher {
} }
/// Pattern MUST NOT be a vectorial placeholder /// Pattern MUST NOT be a vectorial placeholder
fn mk_scalar(pattern: &Expr) -> ScalMatcher { fn mk_scalar(pattern: &RuleExpr) -> ScalMatcher {
match &pattern.value { match &pattern.value {
Clause::P(p) => ScalMatcher::P(p.clone()), Clause::P(p) => ScalMatcher::P(p.clone()),
Clause::Name(n) => ScalMatcher::Name(*n), Clause::Name(n) => ScalMatcher::Name(*n),
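An illustration of the splitting (assumed from the code above, part of which falls outside this hunk): in a pattern such as `[Name(a), Placeh(...$x), Name(b), Placeh(...$y:1), Name(c)]`, the leading run of scalars is `[Name(a)]` and the trailing run is `[Name(c)]`, so `mk_any` produces an `AnyMatcher::Vec` whose `mid` is built by `mk_vec` from the vectorial-delimited middle, anchored on the highest-priority vectorial `...$y:1`.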


@@ -1,11 +1,12 @@
use super::any_match::any_match; use super::any_match::any_match;
use super::shared::ScalMatcher; use super::shared::ScalMatcher;
use crate::ast::{Clause, Expr}; use crate::ast::Clause;
use crate::rule::matcher::RuleExpr;
use crate::rule::state::{State, StateEntry}; use crate::rule::state::{State, StateEntry};
pub fn scal_match<'a>( pub fn scal_match<'a>(
matcher: &ScalMatcher, matcher: &ScalMatcher,
expr: &'a Expr, expr: &'a RuleExpr,
) -> Option<State<'a>> { ) -> Option<State<'a>> {
match (matcher, &expr.value) { match (matcher, &expr.value) {
(ScalMatcher::P(p1), Clause::P(p2)) if p1 == p2 => Some(State::new()), (ScalMatcher::P(p1), Clause::P(p2)) if p1 == p2 => Some(State::new()),
@@ -25,7 +26,7 @@ pub fn scal_match<'a>(
pub fn scalv_match<'a>( pub fn scalv_match<'a>(
matchers: &[ScalMatcher], matchers: &[ScalMatcher],
seq: &'a [Expr], seq: &'a [RuleExpr],
) -> Option<State<'a>> { ) -> Option<State<'a>> {
if seq.len() != matchers.len() { if seq.len() != matchers.len() {
return None; return None;


@@ -3,12 +3,12 @@ use std::rc::Rc;
use super::any_match::any_match; use super::any_match::any_match;
use super::build::mk_any; use super::build::mk_any;
use crate::ast::Expr; use crate::interner::{InternedDisplay, Interner, Tok};
use crate::interner::{InternedDisplay, Interner, Sym, Tok};
use crate::representations::Primitive; use crate::representations::Primitive;
use crate::rule::matcher::Matcher; use crate::rule::matcher::{Matcher, RuleExpr};
use crate::rule::state::State; use crate::rule::state::State;
use crate::utils::{sym2string, unwrap_or, Side}; use crate::utils::{sym2string, unwrap_or, Side};
use crate::Sym;
pub enum ScalMatcher { pub enum ScalMatcher {
P(Primitive), P(Primitive),
@@ -54,11 +54,11 @@ pub enum AnyMatcher {
Vec { left: Vec<ScalMatcher>, mid: VecMatcher, right: Vec<ScalMatcher> }, Vec { left: Vec<ScalMatcher>, mid: VecMatcher, right: Vec<ScalMatcher> },
} }
impl Matcher for AnyMatcher { impl Matcher for AnyMatcher {
fn new(pattern: Rc<Vec<Expr>>) -> Self { fn new(pattern: Rc<Vec<RuleExpr>>) -> Self {
mk_any(&pattern) mk_any(&pattern)
} }
fn apply<'a>(&self, source: &'a [Expr]) -> Option<State<'a>> { fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>> {
any_match(self, source) any_match(self, source)
} }
} }
@@ -183,11 +183,11 @@ impl InternedDisplay for AnyMatcher {
/// vectorial placeholders and handles the scalars on leaves. /// vectorial placeholders and handles the scalars on leaves.
pub struct VectreeMatcher(AnyMatcher); pub struct VectreeMatcher(AnyMatcher);
impl Matcher for VectreeMatcher { impl Matcher for VectreeMatcher {
fn new(pattern: Rc<Vec<Expr>>) -> Self { fn new(pattern: Rc<Vec<RuleExpr>>) -> Self {
Self(AnyMatcher::new(pattern)) Self(AnyMatcher::new(pattern))
} }
fn apply<'a>(&self, source: &'a [Expr]) -> Option<State<'a>> { fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>> {
self.0.apply(source) self.0.apply(source)
} }
} }


@@ -4,13 +4,13 @@ use itertools::Itertools;
use super::scal_match::scalv_match; use super::scal_match::scalv_match;
use super::shared::VecMatcher; use super::shared::VecMatcher;
use crate::ast::Expr; use crate::rule::matcher::RuleExpr;
use crate::rule::state::{State, StateEntry}; use crate::rule::state::{State, StateEntry};
use crate::utils::unwrap_or; use crate::utils::unwrap_or;
pub fn vec_match<'a>( pub fn vec_match<'a>(
matcher: &VecMatcher, matcher: &VecMatcher,
seq: &'a [Expr], seq: &'a [RuleExpr],
) -> Option<State<'a>> { ) -> Option<State<'a>> {
match matcher { match matcher {
VecMatcher::Placeh { key, nonzero } => { VecMatcher::Placeh { key, nonzero } => {


@@ -1,24 +1,24 @@
use std::rc::Rc;
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use super::matcher::RuleExpr;
use super::vec_attrs::vec_attrs; use super::vec_attrs::vec_attrs;
use super::RuleError; use super::RuleError;
use crate::ast::{Clause, Expr, PHClass, Placeholder, Rule}; use crate::ast::{Clause, Expr, PHClass, Placeholder, Rule};
use crate::interner::{Interner, Tok}; use crate::interner::{Interner, Tok};
use crate::representations::location::Location; use crate::representations::location::Location;
use crate::Sym;
/// Ensure that the rule's source begins and ends with a vectorial without /// Ensure that the rule's source begins and ends with a vectorial without
/// changing its meaning /// changing its meaning
fn pad(mut rule: Rule, i: &Interner) -> Rule { fn pad(mut rule: Rule<Sym>, i: &Interner) -> Rule<Sym> {
let class: PHClass = PHClass::Vec { nonzero: false, prio: 0 }; let class: PHClass = PHClass::Vec { nonzero: false, prio: 0 };
let empty: &[Expr] = &[]; let empty: &[Expr<Sym>] = &[];
let prefix: &[Expr] = &[Expr { let prefix: &[Expr<Sym>] = &[Expr {
location: Location::Unknown, location: Location::Unknown,
value: Clause::Placeh(Placeholder { name: i.i("::prefix"), class }), value: Clause::Placeh(Placeholder { name: i.i("::prefix"), class }),
}]; }];
let suffix: &[Expr] = &[Expr { let suffix: &[Expr<Sym>] = &[Expr {
location: Location::Unknown, location: Location::Unknown,
value: Clause::Placeh(Placeholder { name: i.i("::suffix"), class }), value: Clause::Placeh(Placeholder { name: i.i("::suffix"), class }),
}]; }];
@@ -28,22 +28,14 @@ fn pad(mut rule: Rule, i: &Interner) -> Rule {
let suffix_explicit = vec_attrs(rule_tail).is_some(); let suffix_explicit = vec_attrs(rule_tail).is_some();
let prefix_v = if prefix_explicit { empty } else { prefix }; let prefix_v = if prefix_explicit { empty } else { prefix };
let suffix_v = if suffix_explicit { empty } else { suffix }; let suffix_v = if suffix_explicit { empty } else { suffix };
rule.pattern = Rc::new( rule.pattern = (prefix_v.iter().cloned())
prefix_v .chain(rule.pattern.into_iter())
.iter() .chain(suffix_v.iter().cloned())
.chain(rule.pattern.iter()) .collect();
.chain(suffix_v.iter()) rule.template = (prefix_v.iter().cloned())
.cloned() .chain(rule.template.into_iter())
.collect(), .chain(suffix_v.iter().cloned())
); .collect();
rule.template = Rc::new(
prefix_v
.iter()
.chain(rule.template.iter())
.chain(suffix_v.iter())
.cloned()
.collect(),
);
rule rule
} }
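Illustration of the padding (assumed behaviour, read off the code above): a rule whose pattern is `[Name(foo)]` and whose template is `[Name(bar)]` becomes

  pattern:  [Placeh(...$::prefix), Name(foo), Placeh(...$::suffix)]
  template: [Placeh(...$::prefix), Name(bar), Placeh(...$::suffix)]

while a rule that already begins and ends with vectorial placeholders is returned unchanged, so the padding never changes what the rule matches or produces.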
@@ -62,7 +54,7 @@ impl From<PHClass> for PHType {
} }
fn check_rec_expr( fn check_rec_expr(
expr: &Expr, expr: &RuleExpr,
types: &mut HashMap<Tok<String>, PHType>, types: &mut HashMap<Tok<String>, PHType>,
in_template: bool, in_template: bool,
) -> Result<(), RuleError> { ) -> Result<(), RuleError> {
@@ -95,7 +87,7 @@ fn check_rec_expr(
} }
fn check_rec_exprv( fn check_rec_exprv(
exprv: &[Expr], exprv: &[RuleExpr],
types: &mut HashMap<Tok<String>, PHType>, types: &mut HashMap<Tok<String>, PHType>,
in_template: bool, in_template: bool,
) -> Result<(), RuleError> { ) -> Result<(), RuleError> {
@@ -115,7 +107,10 @@ fn check_rec_exprv(
} }
} }
pub fn prepare_rule(rule: Rule, i: &Interner) -> Result<Rule, RuleError> { pub fn prepare_rule(
rule: Rule<Sym>,
i: &Interner,
) -> Result<Rule<Sym>, RuleError> {
// Dimension check // Dimension check
let mut types = HashMap::new(); let mut types = HashMap::new();
check_rec_exprv(&rule.pattern, &mut types, false)?; check_rec_exprv(&rule.pattern, &mut types, false)?;


@@ -5,19 +5,20 @@ use std::rc::Rc;
use hashbrown::HashSet; use hashbrown::HashSet;
use ordered_float::NotNan; use ordered_float::NotNan;
use super::matcher::Matcher; use super::matcher::{Matcher, RuleExpr};
use super::prepare_rule::prepare_rule; use super::prepare_rule::prepare_rule;
use super::state::apply_exprv; use super::state::apply_exprv;
use super::{update_first_seq, RuleError, VectreeMatcher}; use super::{update_first_seq, RuleError, VectreeMatcher};
use crate::ast::{Expr, Rule}; use crate::ast::Rule;
use crate::interner::{InternedDisplay, Interner, Sym}; use crate::interner::{InternedDisplay, Interner};
use crate::utils::Substack; use crate::utils::Substack;
use crate::Sym;
#[derive(Debug)] #[derive(Debug)]
pub struct CachedRule<M: Matcher> { pub struct CachedRule<M: Matcher> {
matcher: M, matcher: M,
pattern: Rc<Vec<Expr>>, pattern: Vec<RuleExpr>,
template: Rc<Vec<Expr>>, template: Vec<RuleExpr>,
} }
impl<M: InternedDisplay + Matcher> InternedDisplay for CachedRule<M> { impl<M: InternedDisplay + Matcher> InternedDisplay for CachedRule<M> {
@@ -48,9 +49,9 @@ pub struct Repository<M: Matcher> {
impl<M: Matcher> Repository<M> { impl<M: Matcher> Repository<M> {
/// Build a new repository to hold the given set of rules /// Build a new repository to hold the given set of rules
pub fn new( pub fn new(
mut rules: Vec<Rule>, mut rules: Vec<Rule<Sym>>,
i: &Interner, i: &Interner,
) -> Result<Self, (Rule, RuleError)> { ) -> Result<Self, (Rule<Sym>, RuleError)> {
rules.sort_by_key(|r| -r.prio); rules.sort_by_key(|r| -r.prio);
let cache = rules let cache = rules
.into_iter() .into_iter()
@@ -60,10 +61,10 @@ impl<M: Matcher> Repository<M> {
let mut glossary = HashSet::new(); let mut glossary = HashSet::new();
for e in rule.pattern.iter() { for e in rule.pattern.iter() {
e.visit_names(Substack::Bottom, &mut |op| { e.visit_names(Substack::Bottom, &mut |op| {
glossary.insert(op); glossary.insert(*op);
}) })
} }
let matcher = M::new(rule.pattern.clone()); let matcher = M::new(Rc::new(rule.pattern.clone()));
let prep = CachedRule { let prep = CachedRule {
matcher, matcher,
pattern: rule.pattern, pattern: rule.pattern,
@@ -76,10 +77,10 @@ impl<M: Matcher> Repository<M> {
} }
/// Attempt to run each rule in priority order once /// Attempt to run each rule in priority order once
pub fn step(&self, code: &Expr) -> Option<Expr> { pub fn step(&self, code: &RuleExpr) -> Option<RuleExpr> {
let mut glossary = HashSet::new(); let mut glossary = HashSet::new();
code.visit_names(Substack::Bottom, &mut |op| { code.visit_names(Substack::Bottom, &mut |op| {
glossary.insert(op); glossary.insert(*op);
}); });
for (rule, deps, _) in self.cache.iter() { for (rule, deps, _) in self.cache.iter() {
if !deps.is_subset(&glossary) { if !deps.is_subset(&glossary) {
@@ -100,7 +101,7 @@ impl<M: Matcher> Repository<M> {
/// Keep running the matching rule with the highest priority until no /// Keep running the matching rule with the highest priority until no
/// rules match. WARNING: this function might not terminate /// rules match. WARNING: this function might not terminate
#[allow(unused)] #[allow(unused)]
pub fn pass(&self, code: &Expr) -> Option<Expr> { pub fn pass(&self, code: &RuleExpr) -> Option<RuleExpr> {
if let Some(mut processed) = self.step(code) { if let Some(mut processed) = self.step(code) {
while let Some(out) = self.step(&processed) { while let Some(out) = self.step(&processed) {
processed = out processed = out
@@ -114,7 +115,11 @@ impl<M: Matcher> Repository<M> {
/// Attempt to run each rule in priority order `limit` times. Returns /// Attempt to run each rule in priority order `limit` times. Returns
/// the final tree and the number of iterations left to the limit. /// the final tree and the number of iterations left to the limit.
#[allow(unused)] #[allow(unused)]
pub fn long_step(&self, code: &Expr, mut limit: usize) -> (Expr, usize) { pub fn long_step(
&self,
code: &RuleExpr,
mut limit: usize,
) -> (RuleExpr, usize) {
if limit == 0 { if limit == 0 {
return (code.clone(), 0); return (code.clone(), 0);
} }
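A hedged sketch of driving the repository, assuming `rules: Vec<Rule<Sym>>` gathered from the project tree, an interner `i`, and a `code: RuleExpr` to expand (error handling simplified for the example):

  let repo = Repository::<VectreeMatcher>::new(rules, &i)
    .ok()
    .expect("one of the rules failed to compile");
  // run at most 10_000 rule applications, keeping whatever is left of the budget
  let (expanded, steps_left) = repo.long_step(&code, 10_000);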


@@ -3,18 +3,19 @@ use std::rc::Rc;
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use super::matcher::RuleExpr;
use crate::ast::{Clause, Expr, PHClass, Placeholder}; use crate::ast::{Clause, Expr, PHClass, Placeholder};
use crate::interner::Tok; use crate::interner::Tok;
use crate::utils::unwrap_or; use crate::utils::unwrap_or;
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
pub enum StateEntry<'a> { pub enum StateEntry<'a> {
Vec(&'a [Expr]), Vec(&'a [RuleExpr]),
Scalar(&'a Expr), Scalar(&'a RuleExpr),
} }
pub type State<'a> = HashMap<Tok<String>, StateEntry<'a>>; pub type State<'a> = HashMap<Tok<String>, StateEntry<'a>>;
pub fn apply_exprv(template: &[Expr], state: &State) -> Vec<Expr> { pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec<RuleExpr> {
template template
.iter() .iter()
.map(|e| apply_expr(e, state)) .map(|e| apply_expr(e, state))
@@ -22,7 +23,7 @@ pub fn apply_exprv(template: &[Expr], state: &State) -> Vec<Expr> {
.collect() .collect()
} }
pub fn apply_expr(template: &Expr, state: &State) -> Vec<Expr> { pub fn apply_expr(template: &RuleExpr, state: &State) -> Vec<RuleExpr> {
let Expr { location, value } = template; let Expr { location, value } = template;
match value { match value {
Clause::P(_) | Clause::Name(_) => vec![template.clone()], Clause::P(_) | Clause::Name(_) => vec![template.clone()],
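`apply_expr` / `apply_exprv` substitute a matcher's `State` (placeholder name → scalar expression or slice of expressions) back into a rule template. A self-contained miniature of that substitution, with invented `MiniExpr` / `Entry` types standing in for `RuleExpr` and `StateEntry`:

```rust
use std::collections::HashMap;

// Invented miniature of State / StateEntry: a scalar placeholder binds one
// expression, a vectorial one binds a slice that expands in place.
#[derive(Clone, Debug)]
enum MiniExpr {
    Name(String),
    Placeh(String),
}

enum Entry<'a> {
    Scalar(&'a MiniExpr),
    Vec(&'a [MiniExpr]),
}

type MiniState<'a> = HashMap<String, Entry<'a>>;

// Analogue of `apply_expr`: one template element can expand to any number of
// output expressions, hence the Vec return type.
fn apply_expr(template: &MiniExpr, state: &MiniState<'_>) -> Vec<MiniExpr> {
    match template {
        MiniExpr::Name(_) => vec![template.clone()],
        MiniExpr::Placeh(key) => match state.get(key) {
            Some(&Entry::Scalar(e)) => vec![e.clone()],
            Some(&Entry::Vec(v)) => v.to_vec(),
            None => vec![template.clone()], // unbound placeholder kept verbatim
        },
    }
}

// Analogue of `apply_exprv`: flatten the per-element expansions.
fn apply_exprv(template: &[MiniExpr], state: &MiniState<'_>) -> Vec<MiniExpr> {
    template.iter().flat_map(|e| apply_expr(e, state)).collect()
}

fn main() {
    let head = MiniExpr::Name("x".into());
    let tail = vec![MiniExpr::Name("y".into()), MiniExpr::Name("z".into())];
    let state = HashMap::from([
        ("head".to_string(), Entry::Scalar(&head)),
        ("tail".to_string(), Entry::Vec(&tail[..])),
    ]);
    let template = vec![MiniExpr::Placeh("head".into()), MiniExpr::Placeh("tail".into())];
    // The vectorial placeholder splices its slice in, so 2 template items become 3.
    assert_eq!(apply_exprv(&template, &state).len(), 3);
}
```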

View File

@@ -1,15 +1,17 @@
use std::rc::Rc; use std::rc::Rc;
use super::matcher::RuleExpr;
use crate::ast::{Clause, Expr}; use crate::ast::{Clause, Expr};
use crate::utils::replace_first; use crate::utils::replace_first;
use crate::Sym;
/// Traverse the tree, calling pred on every sibling list until it returns /// Traverse the tree, calling pred on every sibling list until it returns
/// some vec, then replace that sibling list with the returned vec and /// some vec, then replace that sibling list with the returned vec and
/// return the updated tree. Returns None if pred never returned Some /// return the updated tree. Returns None if pred never returned Some
pub fn exprv<F: FnMut(Rc<Vec<Expr>>) -> Option<Rc<Vec<Expr>>>>( pub fn exprv<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
input: Rc<Vec<Expr>>, input: Rc<Vec<RuleExpr>>,
pred: &mut F, pred: &mut F,
) -> Option<Rc<Vec<Expr>>> { ) -> Option<Rc<Vec<RuleExpr>>> {
if let Some(v) = pred(input.clone()) { if let Some(v) = pred(input.clone()) {
return Some(v); return Some(v);
} }
@@ -17,18 +19,18 @@ pub fn exprv<F: FnMut(Rc<Vec<Expr>>) -> Option<Rc<Vec<Expr>>>>(
.map(|i| Rc::new(i.collect())) .map(|i| Rc::new(i.collect()))
} }
pub fn expr<F: FnMut(Rc<Vec<Expr>>) -> Option<Rc<Vec<Expr>>>>( pub fn expr<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
input: &Expr, input: &RuleExpr,
pred: &mut F, pred: &mut F,
) -> Option<Expr> { ) -> Option<RuleExpr> {
clause(&input.value, pred) clause(&input.value, pred)
.map(|value| Expr { value, location: input.location.clone() }) .map(|value| Expr { value, location: input.location.clone() })
} }
pub fn clause<F: FnMut(Rc<Vec<Expr>>) -> Option<Rc<Vec<Expr>>>>( pub fn clause<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
c: &Clause, c: &Clause<Sym>,
pred: &mut F, pred: &mut F,
) -> Option<Clause> { ) -> Option<Clause<Sym>> {
match c { match c {
Clause::P(_) | Clause::Placeh { .. } | Clause::Name { .. } => None, Clause::P(_) | Clause::Placeh { .. } | Clause::Name { .. } => None,
Clause::Lambda(arg, body) => Clause::Lambda(arg, body) =>
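The `update` helpers offer every sibling list to `pred` and splice the first replacement it returns back into the tree. A toy version over a made-up `Node` type (not the crate's `Clause`) shows the same control flow:

```rust
use std::rc::Rc;

// Toy stand-in for the expression tree: a leaf or a sibling list.
#[derive(Clone, Debug)]
enum Node {
    Leaf(u32),
    S(Rc<Vec<Node>>),
}

// Analogue of `update::exprv`: offer the list itself to `pred` first; if it
// declines, recurse into the children and splice the first rewritten sibling
// list back in. `None` means `pred` declined everywhere.
fn exprv<F>(input: Rc<Vec<Node>>, pred: &mut F) -> Option<Rc<Vec<Node>>>
where
    F: FnMut(Rc<Vec<Node>>) -> Option<Rc<Vec<Node>>>,
{
    if let Some(replaced) = pred(input.clone()) {
        return Some(replaced);
    }
    for (idx, child) in input.iter().enumerate() {
        if let Node::S(inner) = child {
            if let Some(new_inner) = exprv(inner.clone(), pred) {
                let mut items = (*input).clone();
                items[idx] = Node::S(new_inner);
                return Some(Rc::new(items));
            }
        }
    }
    None
}

fn main() {
    // ((1 2) 3 4): pred rewrites the first two-element sibling list it is
    // offered, which here is the inner (1 2), not the three-element root.
    let tree = Rc::new(vec![
        Node::S(Rc::new(vec![Node::Leaf(1), Node::Leaf(2)])),
        Node::Leaf(3),
        Node::Leaf(4),
    ]);
    let rewritten = exprv(tree, &mut |list| {
        (list.len() == 2).then(|| {
            let mut v = (*list).clone();
            v.push(Node::Leaf(99));
            Rc::new(v)
        })
    });
    assert!(rewritten.is_some());
}
```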

View File

@@ -1,9 +1,10 @@
use crate::ast::{Clause, Expr, PHClass, Placeholder}; use super::matcher::RuleExpr;
use crate::ast::{Clause, PHClass, Placeholder};
use crate::interner::Tok; use crate::interner::Tok;
/// Returns the name, priority and nonzero flag of the expression if it is /// Returns the name, priority and nonzero flag of the expression if it is
/// a vectorial placeholder /// a vectorial placeholder
pub fn vec_attrs(expr: &Expr) -> Option<(Tok<String>, u64, bool)> { pub fn vec_attrs(expr: &RuleExpr) -> Option<(Tok<String>, u64, bool)> {
if let Clause::Placeh(Placeholder { if let Clause::Placeh(Placeholder {
class: PHClass::Vec { prio, nonzero }, class: PHClass::Vec { prio, nonzero },
name, name,
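`vec_attrs` is a small projection: only the vectorial placeholder variant yields its `(name, priority, nonzero)` triple, so callers can use it as an "is this a vectorial slot?" test. A shape-only sketch with stand-in types:

```rust
// Stand-ins for the placeholder clause, not the crate's definitions.
enum PhClass {
    Scalar,
    Vec { prio: u64, nonzero: bool },
}

struct Placeh {
    name: String,
    class: PhClass,
}

// Anything that is not a vectorial placeholder maps to None.
fn vec_attrs(ph: &Placeh) -> Option<(String, u64, bool)> {
    match ph.class {
        PhClass::Vec { prio, nonzero } => Some((ph.name.clone(), prio, nonzero)),
        PhClass::Scalar => None,
    }
}

fn main() {
    let tail = Placeh { name: "tail".into(), class: PhClass::Vec { prio: 0, nonzero: true } };
    assert_eq!(vec_attrs(&tail), Some(("tail".to_string(), 0, true)));
    let scalar = Placeh { name: "x".into(), class: PhClass::Scalar };
    assert_eq!(vec_attrs(&scalar), None);
}
```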

View File

@@ -6,9 +6,10 @@ use super::conv::conv;
use super::io::io; use super::io::io;
use super::num::num; use super::num::num;
use super::str::str; use super::str::str;
use crate::interner::{Interner, Sym}; use crate::interner::Interner;
use crate::pipeline::file_loader::mk_embed_cache; use crate::pipeline::file_loader::mk_embed_cache;
use crate::pipeline::{from_const_tree, parse_layer, ProjectTree}; use crate::pipeline::{from_const_tree, parse_layer, ProjectTree};
use crate::representations::VName;
use crate::sourcefile::{FileEntry, Import}; use crate::sourcefile::{FileEntry, Import};
/// Feature flags for the STL. /// Feature flags for the STL.
@@ -29,29 +30,28 @@ struct StlEmbed;
/// Build the standard library used by the interpreter by combining the other /// Build the standard library used by the interpreter by combining the other
/// libraries /// libraries
pub fn mk_stl(i: &Interner, options: StlOptions) -> ProjectTree { pub fn mk_stl(i: &Interner, options: StlOptions) -> ProjectTree<VName> {
let const_tree = from_const_tree( let const_tree = from_const_tree(
HashMap::from([( HashMap::from([(
i.i("std"), i.i("std"),
io(i, options.impure) + conv(i) + bool(i) + str(i) + num(i), io(i, options.impure) + conv(i) + bool(i) + str(i) + num(i),
)]), )]),
&[i.i("std")], &[i.i("std")],
i,
); );
let ld_cache = mk_embed_cache::<StlEmbed>(".orc", i); let ld_cache = mk_embed_cache::<StlEmbed>(".orc", i);
parse_layer( let targets = StlEmbed::iter()
&StlEmbed::iter() .map(|path| {
.map(|path| -> Sym { path
let segtoks = path
.strip_suffix(".orc") .strip_suffix(".orc")
.expect("the embed is filtered for suffix") .expect("the embed is filtered for suffix")
.split('/') .split('/')
.map(|segment| i.i(segment)) .map(|segment| i.i(segment))
.collect::<Vec<_>>(); .collect::<Vec<_>>()
i.i(&segtoks[..])
}) })
.collect::<Vec<_>>()[..], .collect::<Vec<_>>();
&|p| ld_cache.find(&p), parse_layer(
targets.iter().map(|v| &v[..]),
&|p| ld_cache.find(p),
&const_tree, &const_tree,
&[], &[],
i, i,
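With this change `mk_stl` no longer interns each embed path into a `Sym` up front; every `.orc` path becomes a vector of segments (a `VName`), and `parse_layer` receives borrowed slices of those vectors. The path-to-segments step, minus the interner, looks roughly like this (`embed_targets` and the literal paths are illustrative):

```rust
// Interner-free sketch of the target list `mk_stl` now builds: each embedded
// "<dir>/<name>.orc" path becomes a vector of segments, the plain-string
// counterpart of a VName. The real code iterates StlEmbed and interns each
// segment instead of allocating Strings.
fn embed_targets(paths: &[&str]) -> Vec<Vec<String>> {
    paths
        .iter()
        .map(|path| {
            path.strip_suffix(".orc")
                .expect("the embed is filtered for suffix")
                .split('/')
                .map(|segment| segment.to_string())
                .collect()
        })
        .collect()
}

fn main() {
    let targets = embed_targets(&["std/bool.orc", "std/str.orc"]);
    assert_eq!(targets[0], vec!["std".to_string(), "bool".to_string()]);
    // parse_layer then gets these as borrowed slices, as in
    // `targets.iter().map(|v| &v[..])` above.
    let _slices: Vec<&[String]> = targets.iter().map(|v| &v[..]).collect();
}
```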

View File

@@ -1,3 +1,4 @@
use std::borrow::Borrow;
use std::cell::RefCell; use std::cell::RefCell;
use std::hash::Hash; use std::hash::Hash;
@@ -23,19 +24,23 @@ impl<'a, I: Eq + Hash + Clone, O: Clone> Cache<'a, I, O> {
} }
/// Produce and cache a result by cloning I if necessary /// Produce and cache a result by cloning I if necessary
pub fn find(&self, i: &I) -> O { pub fn find<Q: ?Sized>(&self, q: &Q) -> O
where
Q: Eq + Hash + ToOwned<Owned = I>,
I: Borrow<Q>,
{
let closure = &self.closure; let closure = &self.closure;
if let Some(v) = self.store.borrow().get(i) { if let Some(v) = self.store.borrow().get(q) {
return v.clone(); return v.clone();
} }
// At the moment of invocation the RefCell only holds an immutable borrow; // At the moment of invocation the RefCell only holds an immutable borrow;
// this is important for recursive calculations // this is important for recursive calculations
let result = closure(i.clone(), self); let result = closure(q.to_owned(), self);
let mut store = self.store.borrow_mut(); let mut store = self.store.borrow_mut();
store store
.raw_entry_mut() .raw_entry_mut()
.from_key(i) .from_key(q)
.or_insert_with(|| (i.clone(), result)) .or_insert_with(|| (q.to_owned(), result))
.1 .1
.clone() .clone()
} }
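The point of the new `find` bounds is borrowed lookups: with `Q: Eq + Hash + ToOwned<Owned = I>` and `I: Borrow<Q>`, a cache keyed by `String` can be queried with a plain `&str`, and an owned key is only built on a miss. A trimmed-down cache (plain `insert` in place of `raw_entry_mut`, and a closure that takes only the key) demonstrating the pattern:

```rust
use std::borrow::Borrow;
use std::cell::RefCell;
use std::collections::HashMap;
use std::hash::Hash;

// Trimmed-down illustration of the new `find` bounds; not the crate's Cache.
struct MiniCache<I, O> {
    store: RefCell<HashMap<I, O>>,
    closure: Box<dyn Fn(I) -> O>,
}

impl<I: Eq + Hash + Clone, O: Clone> MiniCache<I, O> {
    fn new(closure: impl Fn(I) -> O + 'static) -> Self {
        Self { store: RefCell::new(HashMap::new()), closure: Box::new(closure) }
    }

    // The borrowed query `q` is only turned into an owned key on a miss.
    fn find<Q: ?Sized>(&self, q: &Q) -> O
    where
        Q: Eq + Hash + ToOwned<Owned = I>,
        I: Borrow<Q>,
    {
        if let Some(v) = self.store.borrow().get(q) {
            return v.clone();
        }
        let result = (self.closure)(q.to_owned());
        self.store.borrow_mut().insert(q.to_owned(), result.clone());
        result
    }
}

fn main() {
    let lengths = MiniCache::new(|s: String| s.len());
    // Keyed by String, queried with &str: no allocation on the hit path.
    assert_eq!(lengths.find("orchid"), 6);
    assert_eq!(lengths.find("orchid"), 6);
}
```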

View File

@@ -1,6 +1,7 @@
mod cache; mod cache;
mod print_nname; mod print_nname;
mod pushed; mod pushed;
mod rc_to_owned;
mod replace_first; mod replace_first;
mod side; mod side;
mod split_max_prefix; mod split_max_prefix;
@@ -11,6 +12,7 @@ mod unwrap_or;
pub use cache::Cache; pub use cache::Cache;
pub use print_nname::sym2string; pub use print_nname::sym2string;
pub use pushed::pushed; pub use pushed::pushed;
pub use rc_to_owned::{map_rc, rc_to_owned};
pub use replace_first::replace_first; pub use replace_first::replace_first;
pub use side::Side; pub use side::Side;
pub use split_max_prefix::split_max_prefix; pub use split_max_prefix::split_max_prefix;

View File

@@ -1,6 +1,7 @@
use itertools::Itertools; use itertools::Itertools;
use crate::interner::{Interner, Sym}; use crate::interner::Interner;
use crate::Sym;
/// Print symbols to :: delimited strings /// Print symbols to :: delimited strings
pub fn sym2string(t: Sym, i: &Interner) -> String { pub fn sym2string(t: Sym, i: &Interner) -> String {

9
src/utils/rc_to_owned.rs Normal file
View File

@@ -0,0 +1,9 @@
use std::rc::Rc;
pub fn rc_to_owned<T: Clone>(rc: Rc<T>) -> T {
Rc::try_unwrap(rc).unwrap_or_else(|rc| rc.as_ref().clone())
}
pub fn map_rc<T: Clone, U>(rc: Rc<T>, pred: impl FnOnce(T) -> U) -> Rc<U> {
Rc::new(pred(rc_to_owned(rc)))
}
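The new `rc_to_owned` helper is the usual "take the value out of an `Rc`, cloning only if it is shared" idiom, and `map_rc` rebuilds an `Rc` around a transformed value. A usage sketch (the `main` scenario is made up):

```rust
use std::rc::Rc;

// Same body as the new helper: move out when the refcount is 1,
// otherwise clone the shared contents.
fn rc_to_owned<T: Clone>(rc: Rc<T>) -> T {
    Rc::try_unwrap(rc).unwrap_or_else(|rc| rc.as_ref().clone())
}

fn map_rc<T: Clone, U>(rc: Rc<T>, pred: impl FnOnce(T) -> U) -> Rc<U> {
    Rc::new(pred(rc_to_owned(rc)))
}

fn main() {
    // Unique handle: the Vec is moved into the closure, not cloned.
    let unique = Rc::new(vec![1, 2, 3]);
    let extended = map_rc(unique, |mut v| {
        v.push(4);
        v
    });
    assert_eq!(*extended, vec![1, 2, 3, 4]);

    // Shared handle: the contents are cloned so the other owner is untouched.
    let shared = Rc::new(String::from("orchid"));
    let keep = shared.clone();
    let shouted = map_rc(shared, |s| s.to_uppercase());
    assert_eq!(*shouted, "ORCHID");
    assert_eq!(*keep, "orchid");
}
```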

View File

@@ -1,6 +1,17 @@
/// A macro version of [Option::unwrap_or_else] which supports flow /// A macro version of [Option::unwrap_or_else] which supports flow
/// control statements such as `return` and `break` in the "else" branch. /// control statements such as `return` and `break` in the "else" branch.
///
/// ```ignore
/// crate::unwrap_or!(Some(1); return)
/// ```
///
/// It also supports unwrapping concrete variants of other enums /// It also supports unwrapping concrete variants of other enums
///
/// ```ignore
/// use crate::representations::Literal;
///
/// crate::unwrap_or!(Literal::Usize(2) => Literal::Number; return)
/// ```
macro_rules! unwrap_or { macro_rules! unwrap_or {
($m:expr; $fail:expr) => {{ ($m:expr; $fail:expr) => {{
if let Some(res) = ($m) { res } else { $fail } if let Some(res) = ($m) { res } else { $fail }