bug fixes and performance improvements

This commit is contained in:
2023-05-07 22:35:38 +01:00
parent f3ce910f66
commit a604e40bad
167 changed files with 5965 additions and 4229 deletions

View File

@@ -0,0 +1,52 @@
use crate::representations::sourcefile::{Member, FileEntry};
use crate::interner::Token;
/// Inject the prelude into a single member. Only namespaces carry a
/// body to extend; every other member is returned untouched.
fn member_rec(
  // object
  member: Member,
  // context
  path: &[Token<String>],
  prelude: &[FileEntry],
) -> Member {
  match member {
    Member::Namespace(name, body) =>
      Member::Namespace(name, entv_rec(body, path, prelude)),
    other => other,
  }
}
/// Prepend the prelude to a list of entries, recursing into every
/// namespace so nested scopes receive it too.
fn entv_rec(
  // object
  data: Vec<FileEntry>,
  // context
  mod_path: &[Token<String>],
  prelude: &[FileEntry],
) -> Vec<FileEntry> {
  // The prelude comes first, then the file's own entries
  let mut out: Vec<FileEntry> = prelude.to_vec();
  out.extend(data.into_iter().map(|entry| match entry {
    FileEntry::Exported(m) =>
      FileEntry::Exported(member_rec(m, mod_path, prelude)),
    FileEntry::Internal(m) =>
      FileEntry::Internal(member_rec(m, mod_path, prelude)),
    other => other,
  }));
  out
}
/// Inject the prelude into `data` and, via [member_rec], into every
/// namespace nested inside it. Prelude entries are prepended at each
/// level.
pub fn add_prelude(
  data: Vec<FileEntry>,
  path: &[Token<String>],
  prelude: &[FileEntry],
) -> Vec<FileEntry> {
  entv_rec(data, path, prelude)
}

View File

@@ -0,0 +1,215 @@
use std::rc::Rc;
use hashbrown::HashMap;
use crate::pipeline::error::ProjectError;
use crate::interner::{Token, Interner};
use crate::utils::iter::{box_once, box_empty};
use crate::utils::{Substack, pushed};
use crate::ast::{Expr, Constant};
use crate::pipeline::source_loader::{LoadedSourceTable, LoadedSource};
use crate::representations::tree::{Module, ModMember, ModEntry};
use crate::representations::sourcefile::{FileEntry, Member, absolute_path};
use super::collect_ops::InjectedOperatorsFn;
use super::{collect_ops, ProjectTree, ProjectExt};
use super::parse_file::parse_file;
/// A loaded file together with its parse result, keyed for tree assembly
#[derive(Debug)]
struct ParsedSource<'a> {
  // Absolute module path of the file, as interned name segments
  path: Vec<Token<String>>,
  // Source text and preparsed tree, borrowed from the loaded table
  loaded: &'a LoadedSource,
  // Entries produced by [parse_file] for this file
  parsed: Vec<FileEntry>
}
/// Split a path into the prefix identifying a source file and the
/// remainder that navigates within that file's module tree.
///
/// # Panics
///
/// If `path` does not point into the tree ("invalid path cannot be
/// split").
pub fn split_path<'a>(path: &'a [Token<String>], proj: &'a ProjectTree)
-> (&'a [Token<String>], &'a [Token<String>])
{
  // The empty path is all file, no subpath
  let (end, body) = if let Some(s) = path.split_last() {s}
  else {return (&[], &[])};
  let mut module = proj.0.walk(body, false).expect("invalid path cannot be split");
  // If the final segment is itself a submodule, descend into it so
  // its own `file` metadata is consulted
  // NOTE(review): `items[end]` panics if `end` is absent from the
  // parent module — confirm callers guarantee the path exists
  if let ModMember::Sub(m) = &module.items[end].member {
    module = m.clone();
  }
  // When the module records its source file, that prefix of `path`
  // is the file part; otherwise the whole path counts as the file
  let file = module.extra.file.as_ref()
    .map(|s| &path[..s.len()])
    .unwrap_or(&path[..]);
  let subpath = &path[file.len()..];
  (file, subpath)
}
/// Convert normalized, prefixed source into a module
///
/// Scans `data` once per concern — imports, import origins, exports,
/// rules, items — and assembles a [Module] whose [ProjectExt] extra
/// records everything later pipeline stages need.
fn source_to_module(
  // level
  path: Substack<Token<String>>,
  preparsed: &Module<impl Clone, impl Clone>,
  // data
  data: Vec<FileEntry>,
  // context
  i: &Interner,
  filepath_len: usize,
) -> Rc<Module<Expr, ProjectExt>> {
  let path_v = path.iter().rev_vec_clone();
  // Flatten all import lists into one vector; by this stage every
  // import is expected to name a single item ("imports normalized")
  let imports = data.iter()
    .filter_map(|ent| if let FileEntry::Import(impv) = ent {
      Some(impv.iter())
    } else {None})
    .flatten()
    .cloned()
    .collect::<Vec<_>>();
  // Map each imported name to the absolute path of its source module
  let imports_from = imports.iter()
    .map(|imp| {
      let mut imp_path_v = i.r(imp.path).clone();
      imp_path_v.push(imp.name.expect("imports normalized"));
      let mut abs_path = absolute_path(
        &path_v,
        &imp_path_v,
        i, &|n| preparsed.items.contains_key(&n)
      ).expect("tested in preparsing");
      // The last segment is the item, the rest is the module path
      let name = abs_path.pop().expect("importing the global context");
      (name, i.i(&abs_path))
    })
    .collect::<HashMap<_, _>>();
  // Map each exported name to its full path below this module
  let exports = data.iter()
    .flat_map(|ent| {
      let mk_ent = |name| (name, i.i(&pushed(&path_v, name)));
      match ent {
        FileEntry::Export(names)
        => Box::new(names.iter().copied().map(mk_ent)),
        FileEntry::Exported(mem) => match mem {
          Member::Constant(constant) => box_once(mk_ent(constant.name)),
          Member::Namespace(name, _) => box_once(mk_ent(*name)),
          Member::Rule(rule) => {
            // An exported rule implicitly exports every name it
            // mentions directly below this module's own path
            let mut names = Vec::new();
            for e in rule.source.iter() {
              e.visit_names(Substack::Bottom, &mut |n| {
                if let Some([name]) = i.r(n).strip_prefix(&path_v[..]) {
                  names.push((*name, n))
                }
              })
            }
            Box::new(names.into_iter())
          }
        }
        _ => box_empty()
      }
    })
    .collect::<HashMap<_, _>>();
  // All rules defined here, exported or not
  let rules = data.iter()
    .filter_map(|ent| match ent {
      FileEntry::Exported(Member::Rule(rule)) => Some(rule),
      FileEntry::Internal(Member::Rule(rule)) => Some(rule),
      _ => None,
    })
    .cloned()
    .collect::<Vec<_>>();
  // Convert members into module entries, recursing into namespaces
  // with the matching preparsed submodule
  let items = data.into_iter()
    .filter_map(|ent| match ent {
      FileEntry::Exported(Member::Namespace(name, body)) => {
        let prep_member = &preparsed.items[&name].member;
        let new_prep = if let ModMember::Sub(s) = prep_member {s.as_ref()}
        else { panic!("preparsed missing a submodule") };
        let module = source_to_module(
          path.push(name),
          new_prep, body, i, filepath_len
        );
        let member = ModMember::Sub(module);
        Some((name, ModEntry{ exported: true, member }))
      }
      FileEntry::Internal(Member::Namespace(name, body)) => {
        let prep_member = &preparsed.items[&name].member;
        let new_prep = if let ModMember::Sub(s) = prep_member {s.as_ref()}
        else { panic!("preparsed missing a submodule") };
        let module = source_to_module(
          path.push(name),
          new_prep, body, i, filepath_len
        );
        let member = ModMember::Sub(module);
        Some((name, ModEntry{ exported: false, member }))
      }
      FileEntry::Exported(Member::Constant(Constant{ name, value })) => {
        let member = ModMember::Item(value);
        Some((name, ModEntry{ exported: true, member }))
      }
      FileEntry::Internal(Member::Constant(Constant{ name, value })) => {
        let member = ModMember::Item(value);
        Some((name, ModEntry{ exported: false, member }))
      }
      _ => None,
    })
    .collect::<HashMap<_, _>>();
  Rc::new(Module {
    imports,
    items,
    extra: ProjectExt {
      imports_from,
      exports,
      rules,
      // The first filepath_len segments of the path name the file
      file: Some(path_v[..filepath_len].to_vec())
    }
  })
}
/// Assemble a module from parsed files that share the path prefix
/// represented by `path`.
///
/// `files` must be sorted so that entries whose next path segment is
/// equal are adjacent (see the sort in [build_tree]); the nightly
/// `slice::group_by` relies on this to partition them. When exactly
/// one file remains and its path equals `path`, it is converted
/// directly with [source_to_module].
fn files_to_module(
  path: Substack<Token<String>>,
  files: &[ParsedSource],
  i: &Interner
) -> Rc<Module<Expr, ProjectExt>> {
  let lvl = path.len();
  let path_v = path.iter().rev_vec_clone();
  if files.len() == 1 && files[0].path.len() == lvl {
    return source_to_module(
      path,
      files[0].loaded.preparsed.0.as_ref(),
      files[0].parsed.clone(),
      i, path.len()
    )
  }
  // Otherwise group the files by their next path segment and recurse,
  // producing one synthesized submodule per group
  let items = files.group_by(|a, b| a.path[lvl] == b.path[lvl]).into_iter()
    .map(|files| {
      let namespace = files[0].path[lvl];
      let subpath = path.push(namespace);
      let module = files_to_module(subpath, files, i);
      let member = ModMember::Sub(module);
      (namespace, ModEntry{ exported: true, member })
    })
    .collect::<HashMap<_, _>>();
  // Synthesized directory modules export everything they contain
  let exports = items.keys()
    .copied()
    .map(|name| (name, i.i(&pushed(&path_v, name))))
    .collect();
  Rc::new(Module{
    items,
    imports: vec![],
    extra: ProjectExt {
      exports,
      imports_from: HashMap::new(),
      // Directory modules do not correspond to a single file
      rules: vec![], file: None,
    }
  })
}
/// Parse every loaded file and link the results into a single
/// [ProjectTree].
///
/// # Errors
///
/// Forwards any [ProjectError] raised while collecting operators or
/// parsing a file.
pub fn build_tree<'a>(
  files: LoadedSourceTable,
  i: &Interner,
  prelude: &[FileEntry],
  injected: &impl InjectedOperatorsFn,
) -> Result<ProjectTree, Rc<dyn ProjectError>> {
  let ops_cache = collect_ops::mk_cache(&files, i, injected);
  // Parse each file with its own visible operator set
  let mut entries = files.iter()
    .map(|(path, loaded)| Ok((
      i.r(*path),
      loaded,
      parse_file(*path, &files, &ops_cache, i, prelude)?
    )))
    .collect::<Result<Vec<_>, Rc<dyn ProjectError>>>()?;
  // Reverse-lexicographic sort on the path; files sharing a prefix
  // become adjacent, which files_to_module's group_by depends on
  entries.sort_unstable_by(|a, b| a.0.cmp(&b.0).reverse());
  let files = entries.into_iter()
    .map(|(path, loaded, parsed)| ParsedSource{
      loaded, parsed,
      path: path.clone()
    })
    .collect::<Vec<_>>();
  Ok(ProjectTree(files_to_module(Substack::Bottom, &files, i)))
}

View File

@@ -0,0 +1,75 @@
use std::rc::Rc;
use hashbrown::HashSet;
use crate::representations::tree::WalkErrorKind;
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::pipeline::error::{ProjectError, ModuleNotFound};
use crate::interner::{Token, Interner};
use crate::utils::Cache;
use crate::pipeline::split_name::split_name;
/// The set of operator names exported by a module, or an error
pub type OpsResult = Result<Rc<HashSet<Token<String>>>, Rc<dyn ProjectError>>;
/// Lazy per-module cache over [collect_exported_ops]
pub type ExportedOpsCache<'a> = Cache<'a, Token<Vec<Token<String>>>, OpsResult>;
/// Callback that supplies the exports of injected (non-source)
/// modules by path, or None if the path is not injected.
/// (Trait aliases are a nightly feature.)
pub trait InjectedOperatorsFn = Fn(
  Token<Vec<Token<String>>>
) -> Option<Rc<HashSet<Token<String>>>>;
/// Length of the longest shared prefix of two sequences.
fn coprefix<T: Eq>(
  l: impl Iterator<Item = T>,
  r: impl Iterator<Item = T>
) -> usize {
  let mut shared = 0;
  for (a, b) in l.zip(r) {
    if a != b { break }
    shared += 1;
  }
  shared
}
/// Collect all names exported by the module at the specified path
///
/// Injected modules take precedence over source files. If the path
/// does not resolve to a file, it is treated as a directory and the
/// next path segment of every loaded file under it is returned.
///
/// # Errors
///
/// [ModuleNotFound] if the subpath within the file cannot be walked.
pub fn collect_exported_ops(
  path: Token<Vec<Token<String>>>,
  loaded: &LoadedSourceTable,
  i: &Interner,
  injected: &impl InjectedOperatorsFn
) -> OpsResult {
  // NB: this binding shadows the interner `i`, but only until the
  // early return
  if let Some(i) = injected(path) {return Ok(i)}
  let is_file = |n: &[Token<String>]| loaded.contains_key(&i.i(n));
  let path_s = &i.r(path)[..];
  let name_split = split_name(path_s, &is_file);
  let (fpath_v, subpath_v) = if let Some(f) = name_split {f} else {
    // Not a file: list the immediate children of this directory by
    // scanning the loaded file names it prefixes.
    // NOTE(review): assumes no loaded file name equals `path` exactly,
    // otherwise the index below is out of bounds — confirm upstream
    return Ok(Rc::new(loaded.keys().copied()
      .filter_map(|modname| {
        let modname_s = i.r(modname);
        if path_s.len() == coprefix(path_s.iter(), modname_s.iter()) {
          Some(modname_s[path_s.len()])
        } else {None}
      })
      .collect::<HashSet<_>>()
    ))
  };
  let fpath = i.i(fpath_v);
  let preparsed = &loaded[&fpath].preparsed;
  // Walk to the module inside the file, reporting missing segments
  let module = preparsed.0.walk(&subpath_v, false)
    .map_err(|walk_err| match walk_err.kind {
      WalkErrorKind::Private => unreachable!("visibility is not being checked here"),
      WalkErrorKind::Missing => ModuleNotFound{
        file: i.extern_vec(fpath),
        // Only the segments successfully walked before the failure
        subpath: subpath_v.into_iter()
          .take(walk_err.pos)
          .map(|t| i.r(*t))
          .cloned()
          .collect()
      }.rc(),
    })?;
  // Only exported items count
  Ok(Rc::new(module.items.iter()
    .filter(|(_, v)| v.exported)
    .map(|(k, _)| *k)
    .collect()
  ))
}
/// Build a lazy cache over [collect_exported_ops] so that each
/// module's export set is computed at most once.
pub fn mk_cache<'a>(
  loaded: &'a LoadedSourceTable,
  i: &'a Interner,
  injected: &'a impl InjectedOperatorsFn,
) -> ExportedOpsCache<'a> {
  Cache::new(|path, _this| collect_exported_ops(path, loaded, i, injected))
}

View File

@@ -0,0 +1,8 @@
mod exported_ops;
mod ops_for;
pub use exported_ops::{
ExportedOpsCache, OpsResult, InjectedOperatorsFn,
collect_exported_ops, mk_cache
};
pub use ops_for::collect_ops_for;

View File

@@ -0,0 +1,49 @@
use std::rc::Rc;
use hashbrown::HashSet;
use crate::parse::is_op;
use crate::pipeline::error::ProjectError;
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::interner::{Token, Interner};
use crate::representations::tree::{Module, ModMember};
use crate::pipeline::import_abs_path::import_abs_path;
use super::exported_ops::{ExportedOpsCache, OpsResult};
/// Collect all operators and names, exported or local, defined in this
/// tree.
fn tree_all_ops(
  module: &Module<impl Clone, impl Clone>,
  ops: &mut HashSet<Token<String>>
) {
  for (name, entry) in module.items.iter() {
    ops.insert(*name);
    // Recurse into submodules; leaf items contribute only their name
    if let ModMember::Sub(sub) = &entry.member {
      tree_all_ops(sub.as_ref(), ops);
    }
  }
}
/// Collect all names imported in this file
///
/// Gathers every name defined anywhere in the file's own tree plus
/// every name imported by it — expanding glob imports through
/// `ops_cache` — then keeps only those that qualify as operators.
///
/// # Errors
///
/// Forwards any [ProjectError] raised while resolving the exports of
/// an imported module.
pub fn collect_ops_for(
  file: &[Token<String>],
  loaded: &LoadedSourceTable,
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> OpsResult {
  let tree = &loaded[&i.i(file)].preparsed.0;
  let mut ret = HashSet::new();
  tree_all_ops(tree.as_ref(), &mut ret);
  tree.visit_all_imports(&mut |modpath, module, import| {
    if let Some(n) = import.name { ret.insert(n); } else {
      // Glob import: pull in everything the target module exports
      let path = import_abs_path(
        &file, modpath, module, &i.r(import.path)[..], i
      ).expect("This error should have been caught during loading");
      ret.extend(ops_cache.find(&i.i(&path))?.iter().copied());
    }
    Ok::<_, Rc<dyn ProjectError>>(())
  })?;
  // `retain` replaces the unstable `drain_filter`, which was invoked
  // purely for its side effect of removing non-operator names
  ret.retain(|t| is_op(i.r(*t)));
  Ok(Rc::new(ret))
}

View File

@@ -0,0 +1,93 @@
use std::{ops::Add, rc::Rc};
use hashbrown::HashMap;
use crate::representations::tree::{ModEntry, ModMember, Module};
use crate::representations::Primitive;
use crate::representations::location::Location;
use crate::foreign::ExternFn;
use crate::interner::{Token, Interner};
use crate::ast::{Expr, Clause};
use crate::utils::{Substack, pushed};
use super::{ProjectModule, ProjectExt, ProjectTree};
/// A tree of constants defined in code rather than parsed from source
pub enum ConstTree {
  /// A leaf holding a single constant expression
  Const(Expr),
  /// A branch mapping names to subtrees
  Tree(HashMap<Token<String>, ConstTree>)
}
impl ConstTree {
pub fn xfn(xfn: impl ExternFn + 'static) -> Self {
Self::Const(Expr{
location: Location::Unknown,
value: Clause::P(Primitive::ExternFn(Box::new(xfn)))
})
}
pub fn tree(
arr: impl IntoIterator<Item = (Token<String>, Self)>
) -> Self {
Self::Tree(arr.into_iter().collect())
}
}
impl Add for ConstTree {
type Output = ConstTree;
fn add(self, rhs: ConstTree) -> Self::Output {
if let (Self::Tree(t1), Self::Tree(mut t2)) = (self, rhs) {
let mut product = HashMap::new();
for (key, i1) in t1 {
if let Some(i2) = t2.remove(&key) {
product.insert(key, i1 + i2);
} else {
product.insert(key, i1);
}
}
product.extend(t2.into_iter());
Self::Tree(product)
} else {
panic!("cannot combine tree and value fields")
}
}
}
/// Convert one level of a [ConstTree] into a [ProjectModule],
/// recursing into branches. Every entry is exported.
fn from_const_tree_rec(
  path: Substack<Token<String>>,
  consts: HashMap<Token<String>, ConstTree>,
  file: &[Token<String>],
  i: &Interner,
) -> ProjectModule {
  let path_v = path.iter().rev_vec_clone();
  let items = consts.into_iter()
    .map(|(name, item)| {
      let member = match item {
        ConstTree::Const(c) => ModMember::Item(c),
        ConstTree::Tree(t) => ModMember::Sub(Rc::new(
          from_const_tree_rec(path.push(name), t, file, i)
        )),
      };
      (name, ModEntry{ exported: true, member })
    })
    .collect::<HashMap<_, _>>();
  // Every item is exported under its own qualified name
  let exports = items.keys()
    .map(|name| (*name, i.i(&pushed(&path_v, *name))))
    .collect();
  Module {
    items,
    imports: vec![],
    extra: ProjectExt {
      exports,
      file: Some(file.to_vec()),
      ..Default::default()
    }
  }
}
/// Build a [ProjectTree] from a tree of constants, rooted at the
/// given file path.
pub fn from_const_tree(
  consts: HashMap<Token<String>, ConstTree>,
  file: &[Token<String>],
  i: &Interner,
) -> ProjectTree {
  ProjectTree(Rc::new(
    from_const_tree_rec(Substack::Bottom, consts, file, i)
  ))
}

View File

@@ -0,0 +1,38 @@
/* FILE SEPARATION BOUNDARY
Collect all operators accessible in each file, parse the files with
correct tokenization, resolve glob imports, convert expressions to
refer to tokens with (local) absolute path, and connect them into a
single tree.
The module checks for imports from missing modules (including submodules).
All other errors must be checked later.
Injection strategy:
Return all items of the given module in the injected tree for `injected`
The output of this stage is a tree, which can simply be overlaid with
the injected tree
*/
mod collect_ops;
mod parse_file;
mod build_tree;
mod normalize_imports;
mod prefix;
mod tree;
mod const_tree;
mod add_prelude;
pub use collect_ops::InjectedOperatorsFn;
pub use const_tree::{
ConstTree, from_const_tree,
};
pub use tree::{
ProjectExt, ProjectModule, ProjectTree, collect_consts, collect_rules
};
pub use build_tree::{
build_tree, split_path
};

View File

@@ -0,0 +1,84 @@
use crate::representations::tree::{Module, ModMember};
use crate::representations::sourcefile::{Member, FileEntry, Import};
use crate::utils::BoxedIter;
use crate::utils::{Substack, iter::box_once};
use crate::interner::{Interner, Token};
use crate::pipeline::import_abs_path::import_abs_path;
use super::collect_ops::ExportedOpsCache;
/// Normalize the imports inside a single member.
///
/// Only namespaces contain further entries; anything else passes
/// through unchanged. For a namespace the matching preparsed
/// submodule is looked up so glob imports within it can be resolved.
fn member_rec(
  // level
  mod_stack: Substack<Token<String>>,
  preparsed: &Module<impl Clone, impl Clone>,
  // object
  member: Member,
  // context
  path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Member {
  match member {
    Member::Namespace(name, body) => {
      // The preparse pass records every namespace, so this lookup
      // succeeds for well-formed input
      let prepmember = &preparsed.items[&name].member;
      let subprep = if let ModMember::Sub(m) = prepmember {m.clone()}
      else {unreachable!("This name must point to a namespace")};
      let new_body = entv_rec(
        mod_stack.push(name),
        subprep.as_ref(),
        body,
        path, ops_cache, i
      );
      Member::Namespace(name, new_body)
    },
    any => any
  }
}
/// Normalize the imports in a list of entries.
///
/// Glob imports (those without a name) are expanded into one explicit
/// import per name exported from the target module; members recurse
/// through [member_rec].
fn entv_rec(
  // level
  mod_stack: Substack<Token<String>>,
  preparsed: &Module<impl Clone, impl Clone>,
  // object
  data: Vec<FileEntry>,
  // context
  mod_path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Vec<FileEntry> {
  data.into_iter()
    .map(|ent| match ent {
      FileEntry::Import(imps) => FileEntry::Import(imps.into_iter()
        .flat_map(|import| if let Import{ name: None, path } = import {
          // Resolve the glob's target to an absolute module path
          let p = import_abs_path(
            mod_path, mod_stack, preparsed, &i.r(path)[..], i
          ).expect("Should have emerged in preparsing");
          let names = ops_cache.find(&i.i(&p))
            .expect("Should have emerged in second parsing");
          // One named import per exported operator of the target
          let imports = names.iter()
            .map(move |&n| Import{ name: Some(n), path })
            .collect::<Vec<_>>();
          Box::new(imports.into_iter()) as BoxedIter<Import>
        } else {box_once(import)})
        .collect()
      ),
      FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
        mod_stack, preparsed, mem, mod_path, ops_cache, i
      )),
      FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
        mod_stack, preparsed, mem, mod_path, ops_cache, i
      )),
      any => any
    })
    .collect()
}
/// Resolve every glob import in `data` into an explicit list of
/// single-name imports, recursing into namespaces.
pub fn normalize_imports(
  preparsed: &Module<impl Clone, impl Clone>,
  data: Vec<FileEntry>,
  path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Vec<FileEntry> {
  entv_rec(Substack::Bottom, preparsed, data, path, ops_cache, i)
}

View File

@@ -0,0 +1,44 @@
use std::rc::Rc;
use crate::parse;
use crate::pipeline::error::ProjectError;
use crate::representations::sourcefile::{FileEntry, normalize_namespaces};
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::interner::{Token, Interner};
use super::add_prelude::add_prelude;
use super::collect_ops::{ExportedOpsCache, collect_ops_for};
use super::normalize_imports::normalize_imports;
use super::prefix::prefix;
/// Run a single file through the parsing pipeline: tokenize with the
/// operators visible in it, inject the prelude, normalize imports and
/// namespaces, then prefix every name with the file's path.
///
/// # Errors
///
/// Forwards errors from operator collection. Parse and normalization
/// failures at this stage are treated as bugs because earlier passes
/// validate the same input (see the `expect` messages).
pub fn parse_file(
  path: Token<Vec<Token<String>>>,
  loaded: &LoadedSourceTable,
  ops_cache: &ExportedOpsCache,
  i: &Interner,
  prelude: &[FileEntry],
) -> Result<Vec<FileEntry>, Rc<dyn ProjectError>> {
  let ld = &loaded[&path];
  // Every operator that may appear in this file, required for
  // correct tokenization
  let ops = collect_ops_for(&i.r(path)[..], loaded, ops_cache, i)?;
  let ops_vec = ops.iter()
    .map(|t| i.r(*t))
    .cloned()
    .collect::<Vec<_>>();
  let ctx = parse::ParsingContext{
    interner: i,
    ops: &ops_vec,
    file: Rc::new(i.extern_vec(path))
  };
  let entries = parse::parse(ld.text.as_str(), ctx)
    .expect("This error should have been caught during loading");
  let with_prelude = add_prelude(entries, &i.r(path)[..], prelude);
  let impnormalized = normalize_imports(
    &ld.preparsed.0, with_prelude, &i.r(path)[..], ops_cache, i
  );
  let nsnormalized = normalize_namespaces(
    Box::new(impnormalized.into_iter()), i
  ).expect("This error should have been caught during preparsing");
  let prefixed = prefix(nsnormalized, &i.r(path)[..], ops_cache, i);
  Ok(prefixed)
}

View File

@@ -0,0 +1,82 @@
use std::rc::Rc;
use crate::ast::{Constant, Rule};
use crate::interner::{Token, Interner};
use crate::utils::Substack;
use crate::representations::sourcefile::{Member, FileEntry};
use super::collect_ops::ExportedOpsCache;
/// Apply the path prefix to a single member.
///
/// Constants and rules have their expressions prefixed with the full
/// path (file path followed by the namespace stack); namespaces
/// recurse with the stack extended by their own name.
fn member_rec(
  // level
  mod_stack: Substack<Token<String>>,
  // object
  data: Member,
  // context
  path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Member {
  // let except = |op| imported.contains(&op);
  // Currently no name is exempt from prefixing
  let except = |_| false;
  let prefix_v = path.iter().copied()
    .chain(mod_stack.iter().rev_vec_clone().into_iter())
    .collect::<Vec<_>>();
  let prefix = i.i(&prefix_v);
  match data {
    Member::Namespace(name, body) => {
      let new_body = entv_rec(
        mod_stack.push(name),
        body,
        path, ops_cache, i
      );
      Member::Namespace(name, new_body)
    }
    Member::Constant(constant) => Member::Constant(Constant{
      name: constant.name,
      value: constant.value.prefix(prefix, i, &except)
    }),
    Member::Rule(rule) => Member::Rule(Rule{
      prio: rule.prio,
      // Both sides of the rule are prefixed identically
      source: Rc::new(rule.source.iter()
        .map(|e| e.prefix(prefix, i, &except))
        .collect()
      ),
      target: Rc::new(rule.target.iter()
        .map(|e| e.prefix(prefix, i, &except))
        .collect()
      ),
    })
  }
}
/// Prefix every member in a list of entries, leaving non-member
/// entries untouched.
fn entv_rec(
  // level
  mod_stack: Substack<Token<String>>,
  // object
  data: Vec<FileEntry>,
  // context
  path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Vec<FileEntry> {
  let mut out = Vec::with_capacity(data.len());
  for entry in data {
    out.push(match entry {
      FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
        mod_stack, mem, path, ops_cache, i
      )),
      FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
        mod_stack, mem, path, ops_cache, i
      )),
      // XXX should [FileEntry::Export] be prefixed?
      other => other
    });
  }
  out
}
/// Prefix every name defined in `data` with `path`, recursing into
/// namespaces.
pub fn prefix(
  data: Vec<FileEntry>,
  path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Vec<FileEntry> {
  entv_rec(Substack::Bottom, data, path, ops_cache, i)
}

View File

@@ -0,0 +1,87 @@
use std::{ops::Add, rc::Rc};
use hashbrown::HashMap;
use crate::representations::tree::{Module, ModMember};
use crate::ast::{Rule, Expr};
use crate::interner::{Token, Interner};
use crate::utils::Substack;
/// Project-level metadata attached to every module in the tree
#[derive(Clone, Debug, Default)]
pub struct ProjectExt{
  /// Pairs each foreign token to the module it was imported from
  pub imports_from: HashMap<Token<String>, Token<Vec<Token<String>>>>,
  /// Pairs each exported token to its original full name.
  pub exports: HashMap<Token<String>, Token<Vec<Token<String>>>>,
  /// All rules defined in this module, exported or not
  pub rules: Vec<Rule>,
  /// Filename, if known, for error reporting
  pub file: Option<Vec<Token<String>>>
}
impl Add for ProjectExt {
  type Output = Self;
  /// Merge the metadata of two modules at the same path; on
  /// conflicting keys the right-hand side wins.
  fn add(mut self, rhs: Self) -> Self::Output {
    let ProjectExt{ imports_from, exports, rules, file } = rhs;
    self.imports_from.extend(imports_from);
    self.exports.extend(exports);
    self.rules.extend(rules);
    // A known filename from either side is preserved, rhs preferred
    self.file = file.or(self.file);
    self
  }
}
/// A module enriched with project-level metadata
pub type ProjectModule = Module<Expr, ProjectExt>;
/// The tree of all modules in a loaded project
pub struct ProjectTree(pub Rc<ProjectModule>);
/// Walk the module tree depth-first, appending every rule to `bag`.
fn collect_rules_rec(bag: &mut Vec<Rule>, module: &ProjectModule) {
  for rule in module.extra.rules.iter() {
    bag.push(rule.clone());
  }
  for entry in module.items.values() {
    match &entry.member {
      ModMember::Sub(sub) => collect_rules_rec(bag, sub.as_ref()),
      _ => ()
    }
  }
}
/// Collect every rule defined anywhere in the project tree.
pub fn collect_rules(project: &ProjectTree) -> Vec<Rule> {
  let mut bag = Vec::new();
  collect_rules_rec(&mut bag, project.0.as_ref());
  bag
}
/// Walk the module tree, recording every constant in `bag` under its
/// fully qualified, interned name.
fn collect_consts_rec(
  path: Substack<Token<String>>,
  bag: &mut HashMap<Token<Vec<Token<String>>>, Expr>,
  module: &ProjectModule,
  i: &Interner
) {
  for (key, entry) in module.items.iter() {
    match &entry.member {
      ModMember::Item(expr) => {
        // Qualify the item's name with the path leading here
        let name = path.push(*key).iter().rev_vec_clone();
        bag.insert(i.i(&name), expr.clone());
      }
      ModMember::Sub(sub) =>
        collect_consts_rec(path.push(*key), bag, sub, i),
    }
  }
}
/// Gather every constant in the project into a flat map keyed by
/// absolute, interned path.
pub fn collect_consts(project: &ProjectTree, i: &Interner)
-> HashMap<Token<Vec<Token<String>>>, Expr>
{
  let mut bag = HashMap::new();
  collect_consts_rec(Substack::Bottom, &mut bag, project.0.as_ref(), i);
  bag
}