Converted Interner to work with Rcs

- Interner no longer contains unsafe code
- Tokens now hold a direct reference to the value they represent

This will enable many future improvements.
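
The gist of the change, as a minimal sketch rather than the crate's actual interner module (the bodies of Tok and Interner below are assumptions inferred from the call sites in the diff): a token owns an Rc of the interned value and dereferences to it, so call sites clone tokens (name.clone()) and read values directly (t.as_str(), &**t) instead of resolving them through i.r(*t), and helpers such as extern_all can become associated functions that need no interner handle.

use std::collections::HashMap;
use std::rc::Rc;

/// Illustrative token: owns an Rc of the interned value, so it can be read
/// without going back through the interner.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Tok<T>(Rc<T>);

impl<T> std::ops::Deref for Tok<T> {
  type Target = T;
  fn deref(&self) -> &T { &*self.0 }
}

/// Illustrative interner: deduplicates strings and hands out Rc-backed
/// tokens; no unsafe code, no lifetime tied to the interner.
#[derive(Default)]
pub struct Interner {
  strings: HashMap<String, Rc<String>>,
}

impl Interner {
  /// Intern a string; equal strings share one allocation.
  pub fn i(&mut self, s: &str) -> Tok<String> {
    let rc = (self.strings.entry(s.to_string()))
      .or_insert_with(|| Rc::new(s.to_string()))
      .clone();
    Tok(rc)
  }

  /// With self-contained tokens, helpers like this no longer need &self
  /// (cf. the Interner::extern_all calls in the diff; signature assumed).
  pub fn extern_all(v: &[Tok<String>]) -> Vec<String> {
    v.iter().map(|t| (**t).clone()).collect()
  }
}

fn main() {
  let mut i = Interner::default();
  let a = i.i("foo");
  let b = i.i("foo");
  // Both tokens share the same allocation.
  assert!(Rc::ptr_eq(&a.0, &b.0));
  // The value is reachable straight from the token.
  assert_eq!(a.as_str(), "foo");
  assert_eq!(Interner::extern_all(&[a, b]).join("/"), "foo/foo");
}
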
2023-08-19 14:03:05 +01:00
parent ab0b57b1b8
commit 0b887ced70
62 changed files with 592 additions and 762 deletions

View File

@@ -24,10 +24,10 @@ impl ProjectError for FileLoadingError {
   fn description(&self) -> &str {
     "Neither a file nor a directory could be read from the requested path"
   }
-  fn one_position(&self, _i: &Interner) -> crate::Location {
+  fn one_position(&self) -> crate::Location {
     Location::File(Rc::new(self.path.clone()))
   }
-  fn message(&self, _i: &Interner) -> String {
+  fn message(&self) -> String {
     format!("File: {}\nDirectory: {}", self.file, self.dir)
   }
@@ -89,9 +89,9 @@ pub fn load_file(root: &Path, path: &[impl AsRef<str>]) -> IOResult {
 }
 /// Generates a cached file loader for a directory
-pub fn mk_dir_cache(root: PathBuf, i: &Interner) -> Cache<VName, IOResult> {
+pub fn mk_dir_cache(root: PathBuf) -> Cache<'static, VName, IOResult> {
   Cache::new(move |vname: VName, _this| -> IOResult {
-    let path = vname.iter().map(|t| i.r(*t).as_str()).collect::<Vec<_>>();
+    let path = vname.iter().map(|t| t.as_str()).collect::<Vec<_>>();
     load_file(&root, &path)
   })
 }
@@ -130,12 +130,11 @@ pub fn load_embed<T: 'static + RustEmbed>(path: &str, ext: &str) -> IOResult {
 }
 /// Generates a cached file loader for a [RustEmbed]
-pub fn mk_embed_cache<'a, T: 'static + RustEmbed>(
-  ext: &'a str,
-  i: &'a Interner,
-) -> Cache<'a, Vec<Stok>, IOResult> {
+pub fn mk_embed_cache<T: 'static + RustEmbed>(
+  ext: &str,
+) -> Cache<'_, Vec<Stok>, IOResult> {
   Cache::new(move |vname: VName, _this| -> IOResult {
-    let path = i.extern_all(&vname).join("/");
+    let path = Interner::extern_all(&vname).join("/");
     load_embed::<T>(&path, ext)
   })
 }

View File

@@ -12,10 +12,9 @@ pub fn import_abs_path(
   // path of module within file
   let mod_pathv = mod_stack.iter().rev_vec_clone();
   // path of module within compilation
-  let abs_pathv = src_path
-    .iter()
-    .copied()
-    .chain(mod_pathv.iter().copied())
+  let abs_pathv = (src_path.iter())
+    .chain(mod_pathv.iter())
+    .cloned()
     .collect::<Vec<_>>();
   // preload-target path relative to module
   // preload-target path within compilation

View File

@@ -17,7 +17,7 @@ fn resolve_rec(
     Some(alias.clone())
   } else if let Some((foot, body)) = namespace.split_last() {
     let mut new_beginning = resolve_rec(body, alias_map)?;
-    new_beginning.push(*foot);
+    new_beginning.push(foot.clone());
     Some(new_beginning)
   } else {
     None
@@ -61,7 +61,7 @@ fn apply_aliases_rec(
         ModMember::Item(expr) =>
           ModMember::Item(process_expr(expr, alias_map, injected_as)),
         ModMember::Sub(module) => {
-          let subpath = path.push(*name);
+          let subpath = path.push(name.clone());
           let new_mod = if !updated(&subpath.iter().rev_vec_clone()) {
             module.clone()
           } else {
@@ -70,7 +70,7 @@ fn apply_aliases_rec(
           ModMember::Sub(new_mod)
         },
       };
-      (*name, ModEntry { exported: *exported, member })
+      (name.clone(), ModEntry { exported: *exported, member })
     })
     .collect::<HashMap<_, _>>();
   let rules = (module.extra.rules.iter())
@@ -94,7 +94,7 @@ fn apply_aliases_rec(
     rules,
     exports: (module.extra.exports.iter())
      .map(|(k, v)| {
-        (*k, resolve(v, alias_map, injected_as).unwrap_or(v.clone()))
+        (k.clone(), resolve(v, alias_map, injected_as).unwrap_or(v.clone()))
      })
      .collect(),
     file: module.extra.file.clone(),

View File

@@ -105,13 +105,13 @@ fn collect_aliases_rec(
   if !updated(&mod_path_v) {
     return Ok(());
   };
-  for (&name, target_mod_name) in module.extra.imports_from.iter() {
-    let target_sym_v = pushed(target_mod_name, name);
+  for (name, target_mod_name) in module.extra.imports_from.iter() {
+    let target_sym_v = pushed(target_mod_name, name.clone());
     assert_visible(&mod_path_v, &target_sym_v, project)?;
-    let sym_path_v = pushed(&mod_path_v, name);
+    let sym_path_v = pushed(&mod_path_v, name.clone());
     let target_mod = (project.0.walk_ref(target_mod_name, false))
       .expect("checked above in assert_visible");
-    let target_sym = (target_mod.extra.exports.get(&name))
+    let target_sym = (target_mod.extra.exports.get(name))
       .ok_or_else(|| {
         let file_len =
           target_mod.extra.file.as_ref().unwrap_or(target_mod_name).len();
@@ -125,10 +125,10 @@ fn collect_aliases_rec(
       .clone();
     alias_map.link(sym_path_v, target_sym);
   }
-  for (&name, entry) in module.items.iter() {
+  for (name, entry) in module.items.iter() {
     let submodule = unwrap_or!(&entry.member => ModMember::Sub; continue);
     collect_aliases_rec(
-      path.push(name),
+      path.push(name.clone()),
       submodule,
      project,
      alias_map,

View File

@@ -30,8 +30,8 @@ pub fn parse_layer<'a>(
     module.extra.exports.get(item).cloned()
   };
   let injected_names = |path: Tok<Vec<Tok<String>>>| {
-    let module = environment.0.walk_ref(&i.r(path)[..], false).ok()?;
-    Some(Rc::new(module.extra.exports.keys().copied().collect()))
+    let module = environment.0.walk_ref(&path, false).ok()?;
+    Some(Rc::new(module.extra.exports.keys().cloned().collect()))
   };
   let source =
     source_loader::load_source(targets, prelude, i, loader, &|path| {

View File

@@ -76,7 +76,8 @@ fn source_to_module(
   let imports_from = (imports.iter())
     .map(|imp| -> ProjectResult<_> {
       let mut imp_path_v = imp.path.clone();
-      imp_path_v.push(imp.name.expect("glob imports had just been resolved"));
+      imp_path_v
+        .push(imp.name.clone().expect("glob imports had just been resolved"));
       let mut abs_path = absolute_path(&path_v, &imp_path_v, i)
         .expect("should have failed in preparsing");
       let name = abs_path.pop().ok_or_else(|| {
@@ -92,19 +93,19 @@ fn source_to_module(
     .collect::<Result<HashMap<_, _>, _>>()?;
   let exports = (data.iter())
     .flat_map(|ent| {
-      let mk_ent = |name| (name, pushed(&path_v, name));
+      let mk_ent = |name: Tok<String>| (name.clone(), pushed(&path_v, name));
       match ent {
-        FileEntry::Export(names) => Box::new(names.iter().copied().map(mk_ent)),
+        FileEntry::Export(names) => Box::new(names.iter().cloned().map(mk_ent)),
         FileEntry::Exported(mem) => match mem {
-          Member::Constant(constant) => box_once(mk_ent(constant.name)),
-          Member::Module(ns) => box_once(mk_ent(ns.name)),
+          Member::Constant(constant) => box_once(mk_ent(constant.name.clone())),
+          Member::Module(ns) => box_once(mk_ent(ns.name.clone())),
          Member::Rule(rule) => {
            let mut names = Vec::new();
            for e in rule.pattern.iter() {
              e.search_all(&mut |e| {
                if let Clause::Name(n) = &e.value {
                  if let Some([name]) = n.strip_prefix(&path_v[..]) {
-                    names.push((*name, n.clone()))
+                    names.push((name.clone(), n.clone()))
                  }
                }
                None::<()>
@@ -134,7 +135,7 @@ fn source_to_module(
          panic!("Preparsed should include entries for all submodules")
        );
        let module = match source_to_module(
-          path.push(ns.name),
+          path.push(ns.name.clone()),
          new_prep,
          ns.body,
          i,
@@ -144,7 +145,7 @@ fn source_to_module(
          Ok(t) => t,
        };
        let member = ModMember::Sub(module);
-        Some(Ok((ns.name, ModEntry { exported, member })))
+        Some(Ok((ns.name.clone(), ModEntry { exported, member })))
      },
      Member::Constant(Constant { name, value }) => {
        let member = ModMember::Item(value);
@@ -184,7 +185,7 @@ fn files_to_module(
   let path_v = path.iter().rev_vec_clone();
   if files.len() == 1 && files[0].path.len() == lvl {
     return source_to_module(
-      path,
+      path.clone(),
       &files[0].loaded.preparsed.0,
       files[0].parsed.clone(),
       i,
@@ -192,18 +193,19 @@ fn files_to_module(
     );
   }
   let items = (files.into_iter())
-    .group_by(|f| f.path[lvl])
+    .group_by(|f| f.path[lvl].clone())
     .into_iter()
     .map(|(namespace, files)| -> ProjectResult<_> {
-      let subpath = path.push(namespace);
+      let subpath = path.push(namespace.clone());
       let files_v = files.collect::<Vec<_>>();
       let module = files_to_module(subpath, files_v, i)?;
       let member = ModMember::Sub(module);
       Ok((namespace, ModEntry { exported: true, member }))
     })
     .collect::<Result<HashMap<_, _>, _>>()?;
-  let exports: HashMap<_, _> =
-    items.keys().copied().map(|name| (name, pushed(&path_v, name))).collect();
+  let exports: HashMap<_, _> = (items.keys())
+    .map(|name| (name.clone(), pushed(&path_v, name.clone())))
+    .collect();
   Ok(Module {
     items,
     imports: vec![],
@@ -223,7 +225,7 @@ pub fn build_tree(
   injected: &impl InjectedOperatorsFn,
 ) -> ProjectResult<ProjectTree<VName>> {
   assert!(!files.is_empty(), "A tree requires at least one module");
-  let ops_cache = collect_ops::mk_cache(&files, i, injected);
+  let ops_cache = collect_ops::mk_cache(&files, injected);
   let mut entries = files
     .iter()
     .map(|(path, loaded)| {

View File

@@ -4,7 +4,7 @@ use hashbrown::HashSet;
 use trait_set::trait_set;
 use crate::error::{NotFound, ProjectError, ProjectResult};
-use crate::interner::{Interner, Tok};
+use crate::interner::Tok;
 use crate::pipeline::source_loader::LoadedSourceTable;
 use crate::representations::tree::WalkErrorKind;
 use crate::utils::{split_max_prefix, Cache};
@@ -28,22 +28,21 @@ fn coprefix<T: Eq>(
 pub fn collect_exported_ops(
   path: Sym,
   loaded: &LoadedSourceTable,
-  i: &Interner,
   injected: &impl InjectedOperatorsFn,
 ) -> OpsResult {
-  let injected = injected(path).unwrap_or_else(|| Rc::new(HashSet::new()));
-  let path_s = &i.r(path)[..];
-  match split_max_prefix(path_s, &|n| loaded.contains_key(n)) {
+  let injected =
+    injected(path.clone()).unwrap_or_else(|| Rc::new(HashSet::new()));
+  match split_max_prefix(&path, &|n| loaded.contains_key(n)) {
     None => {
       let ops = (loaded.keys())
         .filter_map(|modname| {
-          if path_s.len() == coprefix(path_s.iter(), modname.iter()) {
-            Some(modname[path_s.len()])
+          if path.len() == coprefix(path.iter(), modname.iter()) {
+            Some(modname[path.len()].clone())
          } else {
            None
          }
        })
-        .chain(injected.iter().copied())
+        .chain(injected.iter().cloned())
        .collect::<HashSet<_>>();
      Ok(Rc::new(ops))
    },
@@ -64,8 +63,8 @@ pub fn collect_exported_ops(
      )?;
      let out = (module.items.iter())
        .filter(|(_, v)| v.exported)
-        .map(|(k, _)| *k)
-        .chain(injected.iter().copied())
+        .map(|(k, _)| k.clone())
+        .chain(injected.iter().cloned())
        .collect::<HashSet<_>>();
      Ok(Rc::new(out))
    },
@@ -74,8 +73,7 @@
 pub fn mk_cache<'a>(
   loaded: &'a LoadedSourceTable,
-  i: &'a Interner,
   injected: &'a impl InjectedOperatorsFn,
 ) -> ExportedOpsCache<'a> {
-  Cache::new(|path, _this| collect_exported_ops(path, loaded, i, injected))
+  Cache::new(|path, _this| collect_exported_ops(path, loaded, injected))
 }

View File

@@ -16,7 +16,7 @@ fn tree_all_ops(
   module: &Module<impl Clone, impl Clone>,
   ops: &mut HashSet<Tok<String>>,
 ) {
-  ops.extend(module.items.keys().copied());
+  ops.extend(module.items.keys().cloned());
   for ent in module.items.values() {
     if let ModMember::Sub(m) = &ent.member {
       tree_all_ops(m, ops);
@@ -40,16 +40,14 @@ pub fn collect_ops_for(
   let mut ret = HashSet::new();
   tree_all_ops(tree, &mut ret);
   tree.visit_all_imports(&mut |modpath, _m, import| -> ProjectResult<()> {
-    if let Some(n) = import.name {
-      ret.insert(n);
+    if let Some(n) = &import.name {
+      ret.insert(n.clone());
     } else {
-      let path = i.expect(
-        import_abs_path(file, modpath, &import.path, i),
-        "This error should have been caught during loading",
-      );
-      ret.extend(ops_cache.find(&i.i(&path))?.iter().copied());
+      let path = import_abs_path(file, modpath, &import.path, i)
+        .expect("This error should have been caught during loading");
+      ret.extend(ops_cache.find(&i.i(&path))?.iter().cloned());
     }
     Ok(())
   })?;
-  Ok(Rc::new(ret.into_iter().filter(|t| is_op(i.r(*t))).collect()))
+  Ok(Rc::new(ret.into_iter().filter(|t| is_op(&**t)).collect()))
 }

View File

@@ -25,8 +25,8 @@ fn member_rec(
         &preparsed.items[&name].member => ModMember::Sub;
         unreachable!("This name must point to a namespace")
       );
-      let new_body =
-        entv_rec(mod_stack.push(name), subprep, body, path, ops_cache, i);
+      let new_stack = mod_stack.push(name.clone());
+      let new_body = entv_rec(new_stack, subprep, body, path, ops_cache, i);
       Member::Module(ModuleBlock { name, body: new_body })
     },
     any => any,
@@ -58,16 +58,12 @@ fn entv_rec(
         .into_iter()
         .flat_map(|import| {
          if let Import { name: None, path } = import {
-            let p = i.expect(
-              import_abs_path(mod_path, mod_stack, &path, i),
-              "Should have emerged in preparsing",
-            );
-            let names = i.expect(
-              ops_cache.find(&i.i(&p)),
-              "Should have emerged in second parsing",
-            );
+            let p = import_abs_path(mod_path, mod_stack.clone(), &path, i)
+              .expect("Should have emerged in preparsing");
+            let names = (ops_cache.find(&i.i(&p)))
+              .expect("Should have emerged in second parsing");
            let imports = (names.iter())
-              .map(|&n| Import { name: Some(n), path: path.clone() })
+              .map(|n| Import { name: Some(n.clone()), path: path.clone() })
              .collect::<Vec<_>>();
            Box::new(imports.into_iter()) as BoxedIter<Import>
          } else {
@@ -77,10 +73,10 @@ fn entv_rec(
          .collect(),
        ),
      FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
-        mod_stack, preparsed, mem, mod_path, ops_cache, i,
+        mod_stack.clone(), preparsed, mem, mod_path, ops_cache, i,
      )),
      FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
-        mod_stack, preparsed, mem, mod_path, ops_cache, i,
+        mod_stack.clone(), preparsed, mem, mod_path, ops_cache, i,
      )),
      any => any,
    })

View File

@@ -28,16 +28,14 @@ pub fn parse_file(
   let ld = &loaded[path];
   // let ops_cache = collect_ops::mk_cache(loaded, i);
   let ops = collect_ops_for(path, loaded, ops_cache, i)?;
-  let ops_vec = ops.iter().map(|t| i.r(*t)).cloned().collect::<Vec<_>>();
+  let ops_vec = ops.iter().map(|t| (**t).clone()).collect::<Vec<_>>();
   let ctx = parse::ParsingContext {
     interner: i,
     ops: &ops_vec,
-    file: Rc::new(i.extern_all(path)),
+    file: Rc::new(Interner::extern_all(path)),
   };
-  let entries = i.expect(
-    parse::parse2(ld.text.as_str(), ctx),
-    "This error should have been caught during loading",
-  );
+  let entries = parse::parse2(ld.text.as_str(), ctx)
+    .expect("This error should have been caught during loading");
   let with_prelude = add_prelude(entries, path, prelude);
   let impnormalized =
     normalize_imports(&ld.preparsed.0, with_prelude, path, ops_cache, i);

View File

@@ -15,12 +15,13 @@ fn member_rec(
   i: &Interner,
 ) -> Member {
   let prefix = (path.iter())
-    .copied()
+    .cloned()
     .chain(mod_stack.iter().rev_vec_clone().into_iter())
     .collect::<Vec<_>>();
   match data {
     Member::Module(ModuleBlock { name, body }) => {
-      let new_body = entv_rec(mod_stack.push(name), body, path, ops_cache, i);
+      let new_stack = mod_stack.push(name.clone());
+      let new_body = entv_rec(new_stack, body, path, ops_cache, i);
       Member::Module(ModuleBlock { name, body: new_body })
     },
     Member::Constant(constant) => Member::Constant(Constant {
@@ -49,15 +50,15 @@ fn entv_rec(
   ops_cache: &ExportedOpsCache,
   i: &Interner,
 ) -> Vec<FileEntry> {
-  data
-    .into_iter()
-    .map(|fe| match fe {
-      FileEntry::Exported(mem) =>
-        FileEntry::Exported(member_rec(mod_stack, mem, path, ops_cache, i)),
-      FileEntry::Internal(mem) =>
-        FileEntry::Internal(member_rec(mod_stack, mem, path, ops_cache, i)),
-      // XXX should [FileEntry::Export] be prefixed?
-      any => any,
+  (data.into_iter())
+    .map(|fe| {
+      let (mem, wrapper): (Member, fn(Member) -> FileEntry) = match fe {
+        FileEntry::Exported(mem) => (mem, FileEntry::Exported),
+        FileEntry::Internal(mem) => (mem, FileEntry::Internal),
+        // XXX should [FileEntry::Export] be prefixed?
+        any => return any,
+      };
+      wrapper(member_rec(mod_stack.clone(), mem, path, ops_cache, i))
     })
     .collect()
 }

View File

@@ -44,7 +44,7 @@ fn load_abs_path_rec(
     return Err(UnexpectedDirectory { path: filename.to_vec() }.rc())
   });
   let preparsed = preparse(
-    filename.iter().map(|t| i.r(*t)).cloned().collect(),
+    Interner::extern_all(filename),
     text.as_str(),
     prelude,
     i,
@@ -87,7 +87,7 @@ fn load_abs_path_rec(
     // recurse on all files and folders within
     for item in coll.iter() {
       let abs_subpath = (abs_path.iter())
-        .copied()
+        .cloned()
         .chain(iter::once(i.i(item)))
         .collect::<Vec<_>>();
       load_abs_path_rec(

View File

@@ -39,12 +39,12 @@ fn to_module(src: &[FileEntry], prelude: &[FileEntry]) -> Module<(), ()> {
       FileEntry::Internal(Member::Module(ns)) => {
         let member = ModMember::Sub(to_module(&ns.body, prelude));
         let entry = ModEntry { exported: false, member };
-        Some((ns.name, entry))
+        Some((ns.name.clone(), entry))
       },
       FileEntry::Exported(Member::Module(ns)) => {
         let member = ModMember::Sub(to_module(&ns.body, prelude));
         let entry = ModEntry { exported: true, member };
-        Some((ns.name, entry))
+        Some((ns.name.clone(), entry))
       },
       _ => None,
     })
@@ -57,12 +57,12 @@ fn to_module(src: &[FileEntry], prelude: &[FileEntry]) -> Module<(), ()> {
       | FileEntry::Exported(Member::Module(_)) => (),
       FileEntry::Export(tokv) =>
         for tok in tokv {
-          add_export(&mut items, *tok)
+          add_export(&mut items, tok.clone())
         },
       FileEntry::Internal(Member::Constant(Constant { name, .. })) =>
-        add_intern(&mut items, *name),
+        add_intern(&mut items, name.clone()),
       FileEntry::Exported(Member::Constant(Constant { name, .. })) =>
-        add_export(&mut items, *name),
+        add_export(&mut items, name.clone()),
       FileEntry::Internal(Member::Rule(rule)) => {
         let names = rule.collect_single_names();
         for name in names {