Converted Interner to work with Rc-s.
- Interner no longer contains unsafe code.
- Tokens now hold a reference to the value they represent directly.
This will enable many future improvements.
This commit is contained in:
@@ -76,7 +76,8 @@ fn source_to_module(
|
||||
let imports_from = (imports.iter())
|
||||
.map(|imp| -> ProjectResult<_> {
|
||||
let mut imp_path_v = imp.path.clone();
|
||||
imp_path_v.push(imp.name.expect("glob imports had just been resolved"));
|
||||
imp_path_v
|
||||
.push(imp.name.clone().expect("glob imports had just been resolved"));
|
||||
let mut abs_path = absolute_path(&path_v, &imp_path_v, i)
|
||||
.expect("should have failed in preparsing");
|
||||
let name = abs_path.pop().ok_or_else(|| {
|
||||
@@ -92,19 +93,19 @@ fn source_to_module(
|
||||
.collect::<Result<HashMap<_, _>, _>>()?;
|
||||
let exports = (data.iter())
|
||||
.flat_map(|ent| {
|
||||
let mk_ent = |name| (name, pushed(&path_v, name));
|
||||
let mk_ent = |name: Tok<String>| (name.clone(), pushed(&path_v, name));
|
||||
match ent {
|
||||
FileEntry::Export(names) => Box::new(names.iter().copied().map(mk_ent)),
|
||||
FileEntry::Export(names) => Box::new(names.iter().cloned().map(mk_ent)),
|
||||
FileEntry::Exported(mem) => match mem {
|
||||
Member::Constant(constant) => box_once(mk_ent(constant.name)),
|
||||
Member::Module(ns) => box_once(mk_ent(ns.name)),
|
||||
Member::Constant(constant) => box_once(mk_ent(constant.name.clone())),
|
||||
Member::Module(ns) => box_once(mk_ent(ns.name.clone())),
|
||||
Member::Rule(rule) => {
|
||||
let mut names = Vec::new();
|
||||
for e in rule.pattern.iter() {
|
||||
e.search_all(&mut |e| {
|
||||
if let Clause::Name(n) = &e.value {
|
||||
if let Some([name]) = n.strip_prefix(&path_v[..]) {
|
||||
names.push((*name, n.clone()))
|
||||
names.push((name.clone(), n.clone()))
|
||||
}
|
||||
}
|
||||
None::<()>
|
||||
@@ -134,7 +135,7 @@ fn source_to_module(
|
||||
panic!("Preparsed should include entries for all submodules")
|
||||
);
|
||||
let module = match source_to_module(
|
||||
path.push(ns.name),
|
||||
path.push(ns.name.clone()),
|
||||
new_prep,
|
||||
ns.body,
|
||||
i,
|
||||
@@ -144,7 +145,7 @@ fn source_to_module(
|
||||
Ok(t) => t,
|
||||
};
|
||||
let member = ModMember::Sub(module);
|
||||
Some(Ok((ns.name, ModEntry { exported, member })))
|
||||
Some(Ok((ns.name.clone(), ModEntry { exported, member })))
|
||||
},
|
||||
Member::Constant(Constant { name, value }) => {
|
||||
let member = ModMember::Item(value);
|
||||
@@ -184,7 +185,7 @@ fn files_to_module(
|
||||
let path_v = path.iter().rev_vec_clone();
|
||||
if files.len() == 1 && files[0].path.len() == lvl {
|
||||
return source_to_module(
|
||||
path,
|
||||
path.clone(),
|
||||
&files[0].loaded.preparsed.0,
|
||||
files[0].parsed.clone(),
|
||||
i,
|
||||
@@ -192,18 +193,19 @@ fn files_to_module(
|
||||
);
|
||||
}
|
||||
let items = (files.into_iter())
|
||||
.group_by(|f| f.path[lvl])
|
||||
.group_by(|f| f.path[lvl].clone())
|
||||
.into_iter()
|
||||
.map(|(namespace, files)| -> ProjectResult<_> {
|
||||
let subpath = path.push(namespace);
|
||||
let subpath = path.push(namespace.clone());
|
||||
let files_v = files.collect::<Vec<_>>();
|
||||
let module = files_to_module(subpath, files_v, i)?;
|
||||
let member = ModMember::Sub(module);
|
||||
Ok((namespace, ModEntry { exported: true, member }))
|
||||
})
|
||||
.collect::<Result<HashMap<_, _>, _>>()?;
|
||||
let exports: HashMap<_, _> =
|
||||
items.keys().copied().map(|name| (name, pushed(&path_v, name))).collect();
|
||||
let exports: HashMap<_, _> = (items.keys())
|
||||
.map(|name| (name.clone(), pushed(&path_v, name.clone())))
|
||||
.collect();
|
||||
Ok(Module {
|
||||
items,
|
||||
imports: vec![],
|
||||
@@ -223,7 +225,7 @@ pub fn build_tree(
|
||||
injected: &impl InjectedOperatorsFn,
|
||||
) -> ProjectResult<ProjectTree<VName>> {
|
||||
assert!(!files.is_empty(), "A tree requires at least one module");
|
||||
let ops_cache = collect_ops::mk_cache(&files, i, injected);
|
||||
let ops_cache = collect_ops::mk_cache(&files, injected);
|
||||
let mut entries = files
|
||||
.iter()
|
||||
.map(|(path, loaded)| {
|
||||
|
||||
@@ -4,7 +4,7 @@ use hashbrown::HashSet;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::error::{NotFound, ProjectError, ProjectResult};
|
||||
use crate::interner::{Interner, Tok};
|
||||
use crate::interner::Tok;
|
||||
use crate::pipeline::source_loader::LoadedSourceTable;
|
||||
use crate::representations::tree::WalkErrorKind;
|
||||
use crate::utils::{split_max_prefix, Cache};
|
||||
@@ -28,22 +28,21 @@ fn coprefix<T: Eq>(
|
||||
pub fn collect_exported_ops(
|
||||
path: Sym,
|
||||
loaded: &LoadedSourceTable,
|
||||
i: &Interner,
|
||||
injected: &impl InjectedOperatorsFn,
|
||||
) -> OpsResult {
|
||||
let injected = injected(path).unwrap_or_else(|| Rc::new(HashSet::new()));
|
||||
let path_s = &i.r(path)[..];
|
||||
match split_max_prefix(path_s, &|n| loaded.contains_key(n)) {
|
||||
let injected =
|
||||
injected(path.clone()).unwrap_or_else(|| Rc::new(HashSet::new()));
|
||||
match split_max_prefix(&path, &|n| loaded.contains_key(n)) {
|
||||
None => {
|
||||
let ops = (loaded.keys())
|
||||
.filter_map(|modname| {
|
||||
if path_s.len() == coprefix(path_s.iter(), modname.iter()) {
|
||||
Some(modname[path_s.len()])
|
||||
if path.len() == coprefix(path.iter(), modname.iter()) {
|
||||
Some(modname[path.len()].clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.chain(injected.iter().copied())
|
||||
.chain(injected.iter().cloned())
|
||||
.collect::<HashSet<_>>();
|
||||
Ok(Rc::new(ops))
|
||||
},
|
||||
@@ -64,8 +63,8 @@ pub fn collect_exported_ops(
|
||||
)?;
|
||||
let out = (module.items.iter())
|
||||
.filter(|(_, v)| v.exported)
|
||||
.map(|(k, _)| *k)
|
||||
.chain(injected.iter().copied())
|
||||
.map(|(k, _)| k.clone())
|
||||
.chain(injected.iter().cloned())
|
||||
.collect::<HashSet<_>>();
|
||||
Ok(Rc::new(out))
|
||||
},
|
||||
@@ -74,8 +73,7 @@ pub fn collect_exported_ops(
|
||||
|
||||
pub fn mk_cache<'a>(
|
||||
loaded: &'a LoadedSourceTable,
|
||||
i: &'a Interner,
|
||||
injected: &'a impl InjectedOperatorsFn,
|
||||
) -> ExportedOpsCache<'a> {
|
||||
Cache::new(|path, _this| collect_exported_ops(path, loaded, i, injected))
|
||||
Cache::new(|path, _this| collect_exported_ops(path, loaded, injected))
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ fn tree_all_ops(
|
||||
module: &Module<impl Clone, impl Clone>,
|
||||
ops: &mut HashSet<Tok<String>>,
|
||||
) {
|
||||
ops.extend(module.items.keys().copied());
|
||||
ops.extend(module.items.keys().cloned());
|
||||
for ent in module.items.values() {
|
||||
if let ModMember::Sub(m) = &ent.member {
|
||||
tree_all_ops(m, ops);
|
||||
@@ -40,16 +40,14 @@ pub fn collect_ops_for(
|
||||
let mut ret = HashSet::new();
|
||||
tree_all_ops(tree, &mut ret);
|
||||
tree.visit_all_imports(&mut |modpath, _m, import| -> ProjectResult<()> {
|
||||
if let Some(n) = import.name {
|
||||
ret.insert(n);
|
||||
if let Some(n) = &import.name {
|
||||
ret.insert(n.clone());
|
||||
} else {
|
||||
let path = i.expect(
|
||||
import_abs_path(file, modpath, &import.path, i),
|
||||
"This error should have been caught during loading",
|
||||
);
|
||||
ret.extend(ops_cache.find(&i.i(&path))?.iter().copied());
|
||||
let path = import_abs_path(file, modpath, &import.path, i)
|
||||
.expect("This error should have been caught during loading");
|
||||
ret.extend(ops_cache.find(&i.i(&path))?.iter().cloned());
|
||||
}
|
||||
Ok(())
|
||||
})?;
|
||||
Ok(Rc::new(ret.into_iter().filter(|t| is_op(i.r(*t))).collect()))
|
||||
Ok(Rc::new(ret.into_iter().filter(|t| is_op(&**t)).collect()))
|
||||
}
|
||||
|
||||
@@ -25,8 +25,8 @@ fn member_rec(
|
||||
&preparsed.items[&name].member => ModMember::Sub;
|
||||
unreachable!("This name must point to a namespace")
|
||||
);
|
||||
let new_body =
|
||||
entv_rec(mod_stack.push(name), subprep, body, path, ops_cache, i);
|
||||
let new_stack = mod_stack.push(name.clone());
|
||||
let new_body = entv_rec(new_stack, subprep, body, path, ops_cache, i);
|
||||
Member::Module(ModuleBlock { name, body: new_body })
|
||||
},
|
||||
any => any,
|
||||
@@ -58,16 +58,12 @@ fn entv_rec(
|
||||
.into_iter()
|
||||
.flat_map(|import| {
|
||||
if let Import { name: None, path } = import {
|
||||
let p = i.expect(
|
||||
import_abs_path(mod_path, mod_stack, &path, i),
|
||||
"Should have emerged in preparsing",
|
||||
);
|
||||
let names = i.expect(
|
||||
ops_cache.find(&i.i(&p)),
|
||||
"Should have emerged in second parsing",
|
||||
);
|
||||
let p = import_abs_path(mod_path, mod_stack.clone(), &path, i)
|
||||
.expect("Should have emerged in preparsing");
|
||||
let names = (ops_cache.find(&i.i(&p)))
|
||||
.expect("Should have emerged in second parsing");
|
||||
let imports = (names.iter())
|
||||
.map(|&n| Import { name: Some(n), path: path.clone() })
|
||||
.map(|n| Import { name: Some(n.clone()), path: path.clone() })
|
||||
.collect::<Vec<_>>();
|
||||
Box::new(imports.into_iter()) as BoxedIter<Import>
|
||||
} else {
|
||||
@@ -77,10 +73,10 @@ fn entv_rec(
|
||||
.collect(),
|
||||
),
|
||||
FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
|
||||
mod_stack, preparsed, mem, mod_path, ops_cache, i,
|
||||
mod_stack.clone(), preparsed, mem, mod_path, ops_cache, i,
|
||||
)),
|
||||
FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
|
||||
mod_stack, preparsed, mem, mod_path, ops_cache, i,
|
||||
mod_stack.clone(), preparsed, mem, mod_path, ops_cache, i,
|
||||
)),
|
||||
any => any,
|
||||
})
|
||||
|
||||
@@ -28,16 +28,14 @@ pub fn parse_file(
|
||||
let ld = &loaded[path];
|
||||
// let ops_cache = collect_ops::mk_cache(loaded, i);
|
||||
let ops = collect_ops_for(path, loaded, ops_cache, i)?;
|
||||
let ops_vec = ops.iter().map(|t| i.r(*t)).cloned().collect::<Vec<_>>();
|
||||
let ops_vec = ops.iter().map(|t| (**t).clone()).collect::<Vec<_>>();
|
||||
let ctx = parse::ParsingContext {
|
||||
interner: i,
|
||||
ops: &ops_vec,
|
||||
file: Rc::new(i.extern_all(path)),
|
||||
file: Rc::new(Interner::extern_all(path)),
|
||||
};
|
||||
let entries = i.expect(
|
||||
parse::parse2(ld.text.as_str(), ctx),
|
||||
"This error should have been caught during loading",
|
||||
);
|
||||
let entries = parse::parse2(ld.text.as_str(), ctx)
|
||||
.expect("This error should have been caught during loading");
|
||||
let with_prelude = add_prelude(entries, path, prelude);
|
||||
let impnormalized =
|
||||
normalize_imports(&ld.preparsed.0, with_prelude, path, ops_cache, i);
|
||||
|
||||
@@ -15,12 +15,13 @@ fn member_rec(
|
||||
i: &Interner,
|
||||
) -> Member {
|
||||
let prefix = (path.iter())
|
||||
.copied()
|
||||
.cloned()
|
||||
.chain(mod_stack.iter().rev_vec_clone().into_iter())
|
||||
.collect::<Vec<_>>();
|
||||
match data {
|
||||
Member::Module(ModuleBlock { name, body }) => {
|
||||
let new_body = entv_rec(mod_stack.push(name), body, path, ops_cache, i);
|
||||
let new_stack = mod_stack.push(name.clone());
|
||||
let new_body = entv_rec(new_stack, body, path, ops_cache, i);
|
||||
Member::Module(ModuleBlock { name, body: new_body })
|
||||
},
|
||||
Member::Constant(constant) => Member::Constant(Constant {
|
||||
@@ -49,15 +50,15 @@ fn entv_rec(
|
||||
ops_cache: &ExportedOpsCache,
|
||||
i: &Interner,
|
||||
) -> Vec<FileEntry> {
|
||||
data
|
||||
.into_iter()
|
||||
.map(|fe| match fe {
|
||||
FileEntry::Exported(mem) =>
|
||||
FileEntry::Exported(member_rec(mod_stack, mem, path, ops_cache, i)),
|
||||
FileEntry::Internal(mem) =>
|
||||
FileEntry::Internal(member_rec(mod_stack, mem, path, ops_cache, i)),
|
||||
// XXX should [FileEntry::Export] be prefixed?
|
||||
any => any,
|
||||
(data.into_iter())
|
||||
.map(|fe| {
|
||||
let (mem, wrapper): (Member, fn(Member) -> FileEntry) = match fe {
|
||||
FileEntry::Exported(mem) => (mem, FileEntry::Exported),
|
||||
FileEntry::Internal(mem) => (mem, FileEntry::Internal),
|
||||
// XXX should [FileEntry::Export] be prefixed?
|
||||
any => return any,
|
||||
};
|
||||
wrapper(member_rec(mod_stack.clone(), mem, path, ops_cache, i))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user