Preparation for sharing

- rustfmt
- clippy
- comments
- README
2023-05-25 19:14:24 +01:00
parent e99ade92ba
commit bc2714aad8
144 changed files with 3734 additions and 3243 deletions

@@ -1,23 +1,19 @@
use crate::representations::sourcefile::{Member, FileEntry};
use crate::interner::Token;
use crate::interner::Tok;
use crate::representations::sourcefile::{FileEntry, Member, Namespace};
fn member_rec(
// object
member: Member,
// context
path: &[Token<String>],
path: &[Tok<String>],
prelude: &[FileEntry],
) -> Member {
match member {
Member::Namespace(name, body) => {
let new_body = entv_rec(
body,
path,
prelude
);
Member::Namespace(name, new_body)
Member::Namespace(Namespace { name, body }) => {
let new_body = entv_rec(body, path, prelude);
Member::Namespace(Namespace { name, body: new_body })
},
any => any
any => any,
}
}
@@ -25,28 +21,26 @@ fn entv_rec(
// object
data: Vec<FileEntry>,
// context
mod_path: &[Token<String>],
mod_path: &[Tok<String>],
prelude: &[FileEntry],
) -> Vec<FileEntry> {
prelude.iter().cloned()
.chain(data.into_iter()
.map(|ent| match ent {
FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
mem, mod_path, prelude
)),
FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
mem, mod_path, prelude
)),
any => any
})
)
prelude
.iter()
.cloned()
.chain(data.into_iter().map(|ent| match ent {
FileEntry::Exported(mem) =>
FileEntry::Exported(member_rec(mem, mod_path, prelude)),
FileEntry::Internal(mem) =>
FileEntry::Internal(member_rec(mem, mod_path, prelude)),
any => any,
}))
.collect()
}
pub fn add_prelude(
data: Vec<FileEntry>,
path: &[Token<String>],
path: &[Tok<String>],
prelude: &[FileEntry],
) -> Vec<FileEntry> {
entv_rec(data, path, prelude)
}
}

@@ -2,38 +2,41 @@ use std::rc::Rc;
use hashbrown::HashMap;
use crate::pipeline::error::ProjectError;
use crate::interner::{Token, Interner};
use crate::utils::iter::{box_once, box_empty};
use crate::utils::{Substack, pushed};
use crate::ast::{Expr, Constant};
use crate::pipeline::source_loader::{LoadedSourceTable, LoadedSource};
use crate::representations::tree::{Module, ModMember, ModEntry};
use crate::representations::sourcefile::{FileEntry, Member, absolute_path};
use super::collect_ops::InjectedOperatorsFn;
use super::{collect_ops, ProjectTree, ProjectExt};
use super::parse_file::parse_file;
use super::{collect_ops, ProjectExt, ProjectTree};
use crate::ast::{Constant, Expr};
use crate::interner::{Interner, Tok};
use crate::pipeline::error::ProjectError;
use crate::pipeline::source_loader::{LoadedSource, LoadedSourceTable};
use crate::representations::sourcefile::{absolute_path, FileEntry, Member};
use crate::representations::tree::{ModEntry, ModMember, Module};
use crate::utils::iter::{box_empty, box_once};
use crate::utils::{pushed, Substack};
#[derive(Debug)]
struct ParsedSource<'a> {
path: Vec<Token<String>>,
path: Vec<Tok<String>>,
loaded: &'a LoadedSource,
parsed: Vec<FileEntry>
parsed: Vec<FileEntry>,
}
pub fn split_path<'a>(path: &'a [Token<String>], proj: &'a ProjectTree)
-> (&'a [Token<String>], &'a [Token<String>])
{
let (end, body) = if let Some(s) = path.split_last() {s}
else {return (&[], &[])};
let mut module = proj.0.walk(body, false).expect("invalid path cannot be split");
pub fn split_path<'a>(
path: &'a [Tok<String>],
proj: &'a ProjectTree,
) -> (&'a [Tok<String>], &'a [Tok<String>]) {
let (end, body) = if let Some(s) = path.split_last() {
s
} else {
return (&[], &[]);
};
let mut module =
proj.0.walk(body, false).expect("invalid path cannot be split");
if let ModMember::Sub(m) = &module.items[end].member {
module = m.clone();
}
let file = module.extra.file.as_ref()
.map(|s| &path[..s.len()])
.unwrap_or(&path[..]);
let file =
module.extra.file.as_ref().map(|s| &path[..s.len()]).unwrap_or(path);
let subpath = &path[file.len()..];
(file, subpath)
}
@@ -41,7 +44,7 @@ pub fn split_path<'a>(path: &'a [Token<String>], proj: &'a ProjectTree)
/// Convert normalized, prefixed source into a module
fn source_to_module(
// level
path: Substack<Token<String>>,
path: Substack<Tok<String>>,
preparsed: &Module<impl Clone, impl Clone>,
// data
data: Vec<FileEntry>,
@@ -50,35 +53,38 @@ fn source_to_module(
filepath_len: usize,
) -> Rc<Module<Expr, ProjectExt>> {
let path_v = path.iter().rev_vec_clone();
let imports = data.iter()
.filter_map(|ent| if let FileEntry::Import(impv) = ent {
Some(impv.iter())
} else {None})
let imports = data
.iter()
.filter_map(|ent| {
if let FileEntry::Import(impv) = ent {
Some(impv.iter())
} else {
None
}
})
.flatten()
.cloned()
.collect::<Vec<_>>();
let imports_from = imports.iter()
let imports_from = imports
.iter()
.map(|imp| {
let mut imp_path_v = i.r(imp.path).clone();
imp_path_v.push(imp.name.expect("imports normalized"));
let mut abs_path = absolute_path(
&path_v,
&imp_path_v,
i, &|n| preparsed.items.contains_key(&n)
).expect("tested in preparsing");
let mut abs_path =
absolute_path(&path_v, &imp_path_v, i).expect("tested in preparsing");
let name = abs_path.pop().expect("importing the global context");
(name, i.i(&abs_path))
})
.collect::<HashMap<_, _>>();
let exports = data.iter()
let exports = data
.iter()
.flat_map(|ent| {
let mk_ent = |name| (name, i.i(&pushed(&path_v, name)));
match ent {
FileEntry::Export(names)
=> Box::new(names.iter().copied().map(mk_ent)),
FileEntry::Export(names) => Box::new(names.iter().copied().map(mk_ent)),
FileEntry::Exported(mem) => match mem {
Member::Constant(constant) => box_once(mk_ent(constant.name)),
Member::Namespace(name, _) => box_once(mk_ent(*name)),
Member::Namespace(ns) => box_once(mk_ent(ns.name)),
Member::Rule(rule) => {
let mut names = Vec::new();
for e in rule.source.iter() {
@@ -89,13 +95,14 @@ fn source_to_module(
})
}
Box::new(names.into_iter())
}
}
_ => box_empty()
},
},
_ => box_empty(),
}
})
.collect::<HashMap<_, _>>();
let rules = data.iter()
let rules = data
.iter()
.filter_map(|ent| match ent {
FileEntry::Exported(Member::Rule(rule)) => Some(rule),
FileEntry::Internal(Member::Rule(rule)) => Some(rule),
@@ -103,38 +110,51 @@ fn source_to_module(
})
.cloned()
.collect::<Vec<_>>();
let items = data.into_iter()
let items = data
.into_iter()
.filter_map(|ent| match ent {
FileEntry::Exported(Member::Namespace(name, body)) => {
let prep_member = &preparsed.items[&name].member;
let new_prep = if let ModMember::Sub(s) = prep_member {s.as_ref()}
else { panic!("preparsed missing a submodule") };
FileEntry::Exported(Member::Namespace(ns)) => {
let prep_member = &preparsed.items[&ns.name].member;
let new_prep = if let ModMember::Sub(s) = prep_member {
s.as_ref()
} else {
panic!("preparsed missing a submodule")
};
let module = source_to_module(
path.push(name),
new_prep, body, i, filepath_len
path.push(ns.name),
new_prep,
ns.body,
i,
filepath_len,
);
let member = ModMember::Sub(module);
Some((name, ModEntry{ exported: true, member }))
}
FileEntry::Internal(Member::Namespace(name, body)) => {
let prep_member = &preparsed.items[&name].member;
let new_prep = if let ModMember::Sub(s) = prep_member {s.as_ref()}
else { panic!("preparsed missing a submodule") };
Some((ns.name, ModEntry { exported: true, member }))
},
FileEntry::Internal(Member::Namespace(ns)) => {
let prep_member = &preparsed.items[&ns.name].member;
let new_prep = if let ModMember::Sub(s) = prep_member {
s.as_ref()
} else {
panic!("preparsed missing a submodule")
};
let module = source_to_module(
path.push(name),
new_prep, body, i, filepath_len
path.push(ns.name),
new_prep,
ns.body,
i,
filepath_len,
);
let member = ModMember::Sub(module);
Some((name, ModEntry{ exported: false, member }))
}
FileEntry::Exported(Member::Constant(Constant{ name, value })) => {
Some((ns.name, ModEntry { exported: false, member }))
},
FileEntry::Exported(Member::Constant(Constant { name, value })) => {
let member = ModMember::Item(value);
Some((name, ModEntry{ exported: true, member }))
}
FileEntry::Internal(Member::Constant(Constant{ name, value })) => {
Some((name, ModEntry { exported: true, member }))
},
FileEntry::Internal(Member::Constant(Constant { name, value })) => {
let member = ModMember::Item(value);
Some((name, ModEntry{ exported: false, member }))
}
Some((name, ModEntry { exported: false, member }))
},
_ => None,
})
.collect::<HashMap<_, _>>();
@@ -150,15 +170,15 @@ fn source_to_module(
imports_from,
exports,
rules,
file: Some(path_v[..filepath_len].to_vec())
}
file: Some(path_v[..filepath_len].to_vec()),
},
})
}
fn files_to_module(
path: Substack<Token<String>>,
path: Substack<Tok<String>>,
files: &[ParsedSource],
i: &Interner
i: &Interner,
) -> Rc<Module<Expr, ProjectExt>> {
let lvl = path.len();
let path_v = path.iter().rev_vec_clone();
@@ -167,19 +187,22 @@ fn files_to_module(
path,
files[0].loaded.preparsed.0.as_ref(),
files[0].parsed.clone(),
i, path.len()
)
i,
path.len(),
);
}
let items = files.group_by(|a, b| a.path[lvl] == b.path[lvl]).into_iter()
let items = files
.group_by(|a, b| a.path[lvl] == b.path[lvl])
.map(|files| {
let namespace = files[0].path[lvl];
let subpath = path.push(namespace);
let module = files_to_module(subpath, files, i);
let member = ModMember::Sub(module);
(namespace, ModEntry{ exported: true, member })
(namespace, ModEntry { exported: true, member })
})
.collect::<HashMap<_, _>>();
let exports: HashMap<_, _> = items.keys()
let exports: HashMap<_, _> = items
.keys()
.copied()
.map(|name| (name, i.i(&pushed(&path_v, name))))
.collect();
@@ -188,38 +211,44 @@ fn files_to_module(
// i.extern_all(&path_v[..]).join("::"),
// exports.keys().map(|t| i.r(*t)).join(", ")
// );
Rc::new(Module{
Rc::new(Module {
items,
imports: vec![],
extra: ProjectExt {
exports,
imports_from: HashMap::new(),
rules: vec![], file: None,
}
rules: vec![],
file: None,
},
})
}
pub fn build_tree<'a>(
pub fn build_tree(
files: LoadedSourceTable,
i: &Interner,
prelude: &[FileEntry],
injected: &impl InjectedOperatorsFn,
) -> Result<ProjectTree, Rc<dyn ProjectError>> {
let ops_cache = collect_ops::mk_cache(&files, i, injected);
let mut entries = files.iter()
.map(|(path, loaded)| Ok((
i.r(*path),
loaded,
parse_file(*path, &files, &ops_cache, i, prelude)?
)))
let mut entries = files
.iter()
.map(|(path, loaded)| {
Ok((
i.r(*path),
loaded,
parse_file(*path, &files, &ops_cache, i, prelude)?,
))
})
.collect::<Result<Vec<_>, Rc<dyn ProjectError>>>()?;
// sort by similarity, then longest-first
entries.sort_unstable_by(|a, b| a.0.cmp(&b.0).reverse());
let files = entries.into_iter()
.map(|(path, loaded, parsed)| ParsedSource{
loaded, parsed,
path: path.clone()
entries.sort_unstable_by(|a, b| a.0.cmp(b.0).reverse());
let files = entries
.into_iter()
.map(|(path, loaded, parsed)| ParsedSource {
loaded,
parsed,
path: path.clone(),
})
.collect::<Vec<_>>();
Ok(ProjectTree(files_to_module(Substack::Bottom, &files, i)))
}
}

@@ -4,73 +4,80 @@ use std::rc::Rc;
use hashbrown::HashSet;
use itertools::Itertools;
use crate::representations::tree::WalkErrorKind;
use crate::interner::{Interner, Sym, Tok};
use crate::pipeline::error::{ModuleNotFound, ProjectError};
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::pipeline::error::{ProjectError, ModuleNotFound};
use crate::interner::{Token, Interner};
use crate::utils::Cache;
use crate::pipeline::split_name::split_name;
use crate::representations::tree::WalkErrorKind;
use crate::utils::Cache;
pub type OpsResult = Result<Rc<HashSet<Token<String>>>, Rc<dyn ProjectError>>;
pub type ExportedOpsCache<'a> = Cache<'a, Token<Vec<Token<String>>>, OpsResult>;
pub type OpsResult = Result<Rc<HashSet<Tok<String>>>, Rc<dyn ProjectError>>;
pub type ExportedOpsCache<'a> = Cache<'a, Sym, OpsResult>;
pub trait InjectedOperatorsFn = Fn(
Token<Vec<Token<String>>>
) -> Option<Rc<HashSet<Token<String>>>>;
pub trait InjectedOperatorsFn = Fn(Sym) -> Option<Rc<HashSet<Tok<String>>>>;
fn coprefix<T: Eq>(
l: impl Iterator<Item = T>,
r: impl Iterator<Item = T>
r: impl Iterator<Item = T>,
) -> usize {
l.zip(r).take_while(|(a, b)| a == b).count()
}
/// Collect all names exported by the module at the specified path
pub fn collect_exported_ops(
path: Token<Vec<Token<String>>>,
path: Sym,
loaded: &LoadedSourceTable,
i: &Interner,
injected: &impl InjectedOperatorsFn
injected: &impl InjectedOperatorsFn,
) -> OpsResult {
if let Some(ops) = injected(path) {
if path == i.i(&[i.i("prelude")][..]) {
println!("%%% Prelude exported ops %%%");
println!("{}", ops.iter().map(|t| i.r(*t)).join(", "));
}
return Ok(ops)
return Ok(ops);
}
let is_file = |n: &[Token<String>]| loaded.contains_key(&i.i(n));
let is_file = |n: &[Tok<String>]| loaded.contains_key(&i.i(n));
let path_s = &i.r(path)[..];
let name_split = split_name(path_s, &is_file);
let (fpath_v, subpath_v) = if let Some(f) = name_split {f} else {
return Ok(Rc::new(loaded.keys().copied()
.filter_map(|modname| {
let modname_s = i.r(modname);
if path_s.len() == coprefix(path_s.iter(), modname_s.iter()) {
Some(modname_s[path_s.len()])
} else {None}
})
.collect::<HashSet<_>>()
))
let (fpath_v, subpath_v) = if let Some(f) = name_split {
f
} else {
return Ok(Rc::new(
loaded
.keys()
.copied()
.filter_map(|modname| {
let modname_s = i.r(modname);
if path_s.len() == coprefix(path_s.iter(), modname_s.iter()) {
Some(modname_s[path_s.len()])
} else {
None
}
})
.collect::<HashSet<_>>(),
));
};
let fpath = i.i(fpath_v);
let preparsed = &loaded[&fpath].preparsed;
let module = preparsed.0.walk(&subpath_v, false)
.map_err(|walk_err| match walk_err.kind {
WalkErrorKind::Private => unreachable!("visibility is not being checked here"),
WalkErrorKind::Missing => ModuleNotFound{
let module = preparsed.0.walk(subpath_v, false).map_err(|walk_err| {
match walk_err.kind {
WalkErrorKind::Private =>
unreachable!("visibility is not being checked here"),
WalkErrorKind::Missing => ModuleNotFound {
file: i.extern_vec(fpath),
subpath: subpath_v.into_iter()
subpath: subpath_v
.iter()
.take(walk_err.pos)
.map(|t| i.r(*t))
.cloned()
.collect()
}.rc(),
})?;
let out: HashSet<_> = module.items.iter()
.filter(|(_, v)| v.exported)
.map(|(k, _)| *k)
.collect();
.collect(),
}
.rc(),
}
})?;
let out: HashSet<_> =
module.items.iter().filter(|(_, v)| v.exported).map(|(k, _)| *k).collect();
if path == i.i(&[i.i("prelude")][..]) {
println!("%%% Prelude exported ops %%%");
println!("{}", out.iter().map(|t| i.r(*t)).join(", "));
@@ -83,7 +90,5 @@ pub fn mk_cache<'a>(
i: &'a Interner,
injected: &'a impl InjectedOperatorsFn,
) -> ExportedOpsCache<'a> {
Cache::new(|path, _this| {
collect_exported_ops(path, loaded, i, injected)
})
}
Cache::new(|path, _this| collect_exported_ops(path, loaded, i, injected))
}
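
When the queried path does not resolve to a loaded file, `collect_exported_ops` falls back to listing the next path segment of every loaded module underneath that path. Below is a minimal, self-contained model of that fallback, using plain string slices in place of the crate's interned `Sym` and `Tok<String>` handles; the function names are invented for illustration.

```rust
use std::collections::HashSet;

// Length of the shared prefix of two segment iterators (mirrors `coprefix`).
fn coprefix<'a>(
  l: impl Iterator<Item = &'a str>,
  r: impl Iterator<Item = &'a str>,
) -> usize {
  l.zip(r).take_while(|(a, b)| a == b).count()
}

// For a module path that is not itself a file, its "exported names" are the
// next path segments of all loaded files underneath it.
fn child_segments<'a>(
  path: &[&'a str],
  loaded: &[Vec<&'a str>],
) -> HashSet<&'a str> {
  loaded
    .iter()
    .filter_map(|modname| {
      let shared = coprefix(path.iter().copied(), modname.iter().copied());
      // Keep only files strictly below `path`, take the segment right after it.
      if shared == path.len() && modname.len() > path.len() {
        Some(modname[path.len()])
      } else {
        None
      }
    })
    .collect()
}

fn main() {
  let loaded = vec![
    vec!["proj", "foo"],
    vec!["proj", "bar"],
    vec!["other", "baz"],
  ];
  // `proj` is a directory rather than a file, so it "exports" its children.
  assert_eq!(child_segments(&["proj"], &loaded), HashSet::from(["foo", "bar"]));
}
```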

@@ -2,7 +2,7 @@ mod exported_ops;
mod ops_for;
pub use exported_ops::{
ExportedOpsCache, OpsResult, InjectedOperatorsFn,
collect_exported_ops, mk_cache
collect_exported_ops, mk_cache, ExportedOpsCache, InjectedOperatorsFn,
OpsResult,
};
pub use ops_for::collect_ops_for;
pub use ops_for::collect_ops_for;

@@ -3,20 +3,19 @@ use std::rc::Rc;
use hashbrown::HashSet;
use itertools::Itertools;
use super::exported_ops::{ExportedOpsCache, OpsResult};
use crate::interner::{Interner, Tok};
use crate::parse::is_op;
use crate::pipeline::error::ProjectError;
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::interner::{Token, Interner};
use crate::representations::tree::{Module, ModMember};
use crate::pipeline::import_abs_path::import_abs_path;
use super::exported_ops::{ExportedOpsCache, OpsResult};
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::representations::tree::{ModMember, Module};
/// Collect all operators and names, exported or local, defined in this
/// tree.
fn tree_all_ops(
module: &Module<impl Clone, impl Clone>,
ops: &mut HashSet<Token<String>>
ops: &mut HashSet<Tok<String>>,
) {
ops.extend(module.items.keys().copied());
for ent in module.items.values() {
@@ -28,21 +27,22 @@ fn tree_all_ops(
/// Collect all names imported in this file
pub fn collect_ops_for(
file: &[Token<String>],
file: &[Tok<String>],
loaded: &LoadedSourceTable,
ops_cache: &ExportedOpsCache,
i: &Interner
i: &Interner,
) -> OpsResult {
let tree = &loaded[&i.i(file)].preparsed.0;
let mut ret = HashSet::new();
println!("collecting ops for {}", i.extern_all(file).join("::"));
tree_all_ops(tree.as_ref(), &mut ret);
tree.visit_all_imports(&mut |modpath, module, import| {
if let Some(n) = import.name { ret.insert(n); } else {
tree.visit_all_imports(&mut |modpath, _module, import| {
if let Some(n) = import.name {
ret.insert(n);
} else {
println!("\tglob import from {}", i.extern_vec(import.path).join("::"));
let path = import_abs_path(
&file, modpath, module, &i.r(import.path)[..], i
).expect("This error should have been caught during loading");
let path = import_abs_path(file, modpath, &i.r(import.path)[..], i)
.expect("This error should have been caught during loading");
ret.extend(ops_cache.find(&i.i(&path))?.iter().copied());
}
Ok::<_, Rc<dyn ProjectError>>(())
@@ -53,4 +53,4 @@ pub fn collect_ops_for(
println!("{}", ret.iter().map(|t| i.r(*t)).join(", "))
}
Ok(Rc::new(ret))
}
}

@@ -1,26 +1,26 @@
use std::{ops::Add, rc::Rc};
use std::ops::Add;
use std::rc::Rc;
use hashbrown::HashMap;
use super::{ProjectExt, ProjectModule, ProjectTree};
use crate::ast::{Clause, Expr};
use crate::foreign::{Atom, Atomic, ExternFn};
use crate::interner::{Interner, Tok};
use crate::representations::location::Location;
use crate::representations::tree::{ModEntry, ModMember, Module};
use crate::representations::Primitive;
use crate::representations::location::Location;
use crate::foreign::{ExternFn, Atomic, Atom};
use crate::interner::{Token, Interner};
use crate::ast::{Expr, Clause};
use crate::utils::{Substack, pushed};
use super::{ProjectModule, ProjectExt, ProjectTree};
use crate::utils::{pushed, Substack};
pub enum ConstTree {
Const(Expr),
Tree(HashMap<Token<String>, ConstTree>)
Tree(HashMap<Tok<String>, ConstTree>),
}
impl ConstTree {
pub fn primitive(primitive: Primitive) -> Self {
Self::Const(Expr{
Self::Const(Expr {
location: Location::Unknown,
value: Clause::P(primitive)
value: Clause::P(primitive),
})
}
pub fn xfn(xfn: impl ExternFn + 'static) -> Self {
@@ -29,9 +29,7 @@ impl ConstTree {
pub fn atom(atom: impl Atomic + 'static) -> Self {
Self::primitive(Primitive::Atom(Atom(Box::new(atom))))
}
pub fn tree(
arr: impl IntoIterator<Item = (Token<String>, Self)>
) -> Self {
pub fn tree(arr: impl IntoIterator<Item = (Tok<String>, Self)>) -> Self {
Self::Tree(arr.into_iter().collect())
}
}
@@ -57,27 +55,29 @@ impl Add for ConstTree {
}
fn from_const_tree_rec(
path: Substack<Token<String>>,
consts: HashMap<Token<String>, ConstTree>,
file: &[Token<String>],
path: Substack<Tok<String>>,
consts: HashMap<Tok<String>, ConstTree>,
file: &[Tok<String>],
i: &Interner,
) -> ProjectModule {
let mut items = HashMap::new();
let path_v = path.iter().rev_vec_clone();
for (name, item) in consts {
items.insert(name, ModEntry{
items.insert(name, ModEntry {
exported: true,
member: match item {
ConstTree::Const(c) => ModMember::Item(c),
ConstTree::Tree(t) => ModMember::Sub(Rc::new(
from_const_tree_rec(path.push(name), t, file, i)
)),
}
ConstTree::Tree(t) => ModMember::Sub(Rc::new(from_const_tree_rec(
path.push(name),
t,
file,
i,
))),
},
});
}
let exports = items.keys()
.map(|name| (*name, i.i(&pushed(&path_v, *name))))
.collect();
let exports =
items.keys().map(|name| (*name, i.i(&pushed(&path_v, *name)))).collect();
Module {
items,
imports: vec![],
@@ -85,15 +85,15 @@ fn from_const_tree_rec(
exports,
file: Some(file.to_vec()),
..Default::default()
}
},
}
}
pub fn from_const_tree(
consts: HashMap<Token<String>, ConstTree>,
file: &[Token<String>],
consts: HashMap<Tok<String>, ConstTree>,
file: &[Tok<String>],
i: &Interner,
) -> ProjectTree {
let module = from_const_tree_rec(Substack::Bottom, consts, file, i);
ProjectTree(Rc::new(module))
}
}

@@ -1,38 +1,30 @@
/* FILE SEPARATION BOUNDARY
// FILE SEPARATION BOUNDARY
//
// Collect all operators accessible in each file, parse the files with
// correct tokenization, resolve glob imports, convert expressions to
// refer to tokens with (local) absolute path, and connect them into a
// single tree.
//
// The module checks for imports from missing modules (including
// submodules). All other errors must be checked later.
//
// Injection strategy:
// Return all items of the given module in the injected tree for
// `injected` The output of this stage is a tree, which can simply be
// overlaid with the injected tree
Collect all operators accessible in each file, parse the files with
correct tokenization, resolve glob imports, convert expressions to
refer to tokens with (local) absolute path, and connect them into a
single tree.
The module checks for imports from missing modules (including submodules).
All other errors must be checked later.
Injection strategy:
Return all items of the given module in the injected tree for `injected`
The output of this stage is a tree, which can simply be overlaid with
the injected tree
*/
mod collect_ops;
mod parse_file;
mod add_prelude;
mod build_tree;
mod collect_ops;
mod const_tree;
mod normalize_imports;
mod parse_file;
mod prefix;
mod tree;
mod const_tree;
mod add_prelude;
pub use build_tree::{build_tree, split_path};
pub use collect_ops::InjectedOperatorsFn;
pub use const_tree::{
ConstTree, from_const_tree,
};
pub use const_tree::{from_const_tree, ConstTree};
pub use tree::{
ProjectExt, ProjectModule, ProjectTree, collect_consts, collect_rules
collect_consts, collect_rules, ProjectExt, ProjectModule, ProjectTree,
};
pub use build_tree::{
build_tree, split_path
};
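
The injection strategy described in the module comment above comes down to a single callback: the caller supplies an `InjectedOperatorsFn`, and the `ProjectTree` this stage produces can then be overlaid with the injected tree. A hedged sketch of that wiring follows, based only on the signatures visible in this diff; the import path `crate::pipeline::project_tree` and the function name `link_sources` are assumptions, and the snippet only compiles inside the crate itself.

```rust
use std::rc::Rc;

use hashbrown::HashSet;

// Assumed module path for this stage; the diff does not show the file names.
use crate::interner::{Interner, Sym, Tok};
use crate::pipeline::error::ProjectError;
use crate::pipeline::project_tree::{build_tree, ProjectTree};
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::representations::sourcefile::FileEntry;

/// Link already-loaded sources into one tree without injecting anything.
fn link_sources(
  files: LoadedSourceTable,
  i: &Interner,
  prelude: &[FileEntry],
) -> Result<ProjectTree, Rc<dyn ProjectError>> {
  // Returning `None` for every module means "nothing is injected here", so
  // only operators found in the loaded sources participate in tokenization.
  let no_injection = |_path: Sym| -> Option<Rc<HashSet<Tok<String>>>> { None };
  build_tree(files, i, prelude, &no_injection)
}
```

A host that does inject built-ins would instead return `Some` with the operator set for the injected module's path, and overlay the injected tree on the result.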

@@ -1,74 +1,88 @@
use crate::representations::tree::{Module, ModMember};
use crate::representations::sourcefile::{Member, FileEntry, Import};
use crate::utils::BoxedIter;
use crate::utils::{Substack, iter::box_once};
use crate::interner::{Interner, Token};
use crate::pipeline::import_abs_path::import_abs_path;
use super::collect_ops::ExportedOpsCache;
use crate::interner::{Interner, Tok};
use crate::pipeline::import_abs_path::import_abs_path;
use crate::representations::sourcefile::{
FileEntry, Import, Member, Namespace,
};
use crate::representations::tree::{ModMember, Module};
use crate::utils::iter::box_once;
use crate::utils::{BoxedIter, Substack};
fn member_rec(
// level
mod_stack: Substack<Token<String>>,
mod_stack: Substack<Tok<String>>,
preparsed: &Module<impl Clone, impl Clone>,
// object
member: Member,
// context
path: &[Token<String>],
path: &[Tok<String>],
ops_cache: &ExportedOpsCache,
i: &Interner
i: &Interner,
) -> Member {
match member {
Member::Namespace(name, body) => {
Member::Namespace(Namespace { name, body }) => {
let prepmember = &preparsed.items[&name].member;
let subprep = if let ModMember::Sub(m) = prepmember {m.clone()}
else {unreachable!("This name must point to a namespace")};
let subprep = if let ModMember::Sub(m) = prepmember {
m.clone()
} else {
unreachable!("This name must point to a namespace")
};
let new_body = entv_rec(
mod_stack.push(name),
subprep.as_ref(),
body,
path, ops_cache, i
path,
ops_cache,
i,
);
Member::Namespace(name, new_body)
Member::Namespace(Namespace { name, body: new_body })
},
any => any
any => any,
}
}
fn entv_rec(
// level
mod_stack: Substack<Token<String>>,
mod_stack: Substack<Tok<String>>,
preparsed: &Module<impl Clone, impl Clone>,
// object
data: Vec<FileEntry>,
// context
mod_path: &[Token<String>],
mod_path: &[Tok<String>],
ops_cache: &ExportedOpsCache,
i: &Interner
i: &Interner,
) -> Vec<FileEntry> {
data.into_iter()
data
.into_iter()
.map(|ent| match ent {
FileEntry::Import(imps) => FileEntry::Import(imps.into_iter()
.flat_map(|import| if let Import{ name: None, path } = import {
let p = import_abs_path(
mod_path, mod_stack, preparsed, &i.r(path)[..], i
).expect("Should have emerged in preparsing");
let names = ops_cache.find(&i.i(&p))
.expect("Should have emerged in second parsing");
let imports = names.iter()
.map(move |&n| Import{ name: Some(n), path })
.collect::<Vec<_>>();
Box::new(imports.into_iter()) as BoxedIter<Import>
} else {box_once(import)})
.collect()
FileEntry::Import(imps) => FileEntry::Import(
imps
.into_iter()
.flat_map(|import| {
if let Import { name: None, path } = import {
let p = import_abs_path(mod_path, mod_stack, &i.r(path)[..], i)
.expect("Should have emerged in preparsing");
let names = ops_cache
.find(&i.i(&p))
.expect("Should have emerged in second parsing");
let imports = names
.iter()
.map(move |&n| Import { name: Some(n), path })
.collect::<Vec<_>>();
Box::new(imports.into_iter()) as BoxedIter<Import>
} else {
box_once(import)
}
})
.collect(),
),
FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
mod_stack, preparsed, mem, mod_path, ops_cache, i
mod_stack, preparsed, mem, mod_path, ops_cache, i,
)),
FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
mod_stack, preparsed, mem, mod_path, ops_cache, i
mod_stack, preparsed, mem, mod_path, ops_cache, i,
)),
any => any
any => any,
})
.collect()
}
@@ -76,9 +90,9 @@ fn entv_rec(
pub fn normalize_imports(
preparsed: &Module<impl Clone, impl Clone>,
data: Vec<FileEntry>,
path: &[Token<String>],
path: &[Tok<String>],
ops_cache: &ExportedOpsCache,
i: &Interner
i: &Interner,
) -> Vec<FileEntry> {
entv_rec(Substack::Bottom, preparsed, data, path, ops_cache, i)
}
}

@@ -1,18 +1,17 @@
use std::rc::Rc;
use crate::parse;
use crate::pipeline::error::ProjectError;
use crate::representations::sourcefile::{FileEntry, normalize_namespaces};
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::interner::{Token, Interner};
use super::add_prelude::add_prelude;
use super::collect_ops::{ExportedOpsCache, collect_ops_for};
use super::collect_ops::{collect_ops_for, ExportedOpsCache};
use super::normalize_imports::normalize_imports;
use super::prefix::prefix;
use crate::interner::{Interner, Sym};
use crate::parse;
use crate::pipeline::error::ProjectError;
use crate::pipeline::source_loader::LoadedSourceTable;
use crate::representations::sourcefile::{normalize_namespaces, FileEntry};
pub fn parse_file(
path: Token<Vec<Token<String>>>,
path: Sym,
loaded: &LoadedSourceTable,
ops_cache: &ExportedOpsCache,
i: &Interner,
@@ -21,24 +20,24 @@ pub fn parse_file(
let ld = &loaded[&path];
// let ops_cache = collect_ops::mk_cache(loaded, i);
let ops = collect_ops_for(&i.r(path)[..], loaded, ops_cache, i)?;
let ops_vec = ops.iter()
.map(|t| i.r(*t))
.cloned()
.collect::<Vec<_>>();
let ctx = parse::ParsingContext{
let ops_vec = ops.iter().map(|t| i.r(*t)).cloned().collect::<Vec<_>>();
let ctx = parse::ParsingContext {
interner: i,
ops: &ops_vec,
file: Rc::new(i.extern_vec(path))
file: Rc::new(i.extern_vec(path)),
};
let entries = parse::parse(ld.text.as_str(), ctx)
.expect("This error should have been caught during loading");
let with_prelude = add_prelude(entries, &i.r(path)[..], prelude);
let impnormalized = normalize_imports(
&ld.preparsed.0, with_prelude, &i.r(path)[..], ops_cache, i
&ld.preparsed.0,
with_prelude,
&i.r(path)[..],
ops_cache,
i,
);
let nsnormalized = normalize_namespaces(
Box::new(impnormalized.into_iter()), i
).expect("This error should have been caught during preparsing");
let nsnormalized = normalize_namespaces(Box::new(impnormalized.into_iter()))
.expect("This error should have been caught during preparsing");
let prefixed = prefix(nsnormalized, &i.r(path)[..], ops_cache, i);
Ok(prefixed)
}
}

@@ -1,82 +1,78 @@
use std::rc::Rc;
use crate::ast::{Constant, Rule};
use crate::interner::{Token, Interner};
use crate::utils::Substack;
use crate::representations::sourcefile::{Member, FileEntry};
use super::collect_ops::ExportedOpsCache;
use crate::ast::{Constant, Rule};
use crate::interner::{Interner, Tok};
use crate::representations::sourcefile::{FileEntry, Member, Namespace};
use crate::utils::Substack;
fn member_rec(
// level
mod_stack: Substack<Token<String>>,
mod_stack: Substack<Tok<String>>,
// object
data: Member,
// context
path: &[Token<String>],
path: &[Tok<String>],
ops_cache: &ExportedOpsCache,
i: &Interner
i: &Interner,
) -> Member {
// let except = |op| imported.contains(&op);
let except = |_| false;
let prefix_v = path.iter().copied()
let prefix_v = path
.iter()
.copied()
.chain(mod_stack.iter().rev_vec_clone().into_iter())
.collect::<Vec<_>>();
let prefix = i.i(&prefix_v);
match data {
Member::Namespace(name, body) => {
let new_body = entv_rec(
mod_stack.push(name),
body,
path, ops_cache, i
);
Member::Namespace(name, new_body)
}
Member::Constant(constant) => Member::Constant(Constant{
Member::Namespace(Namespace { name, body }) => {
let new_body = entv_rec(mod_stack.push(name), body, path, ops_cache, i);
Member::Namespace(Namespace { name, body: new_body })
},
Member::Constant(constant) => Member::Constant(Constant {
name: constant.name,
value: constant.value.prefix(prefix, i, &except)
value: constant.value.prefix(prefix, i, &except),
}),
Member::Rule(rule) => Member::Rule(Rule{
Member::Rule(rule) => Member::Rule(Rule {
prio: rule.prio,
source: Rc::new(rule.source.iter()
.map(|e| e.prefix(prefix, i, &except))
.collect()
source: Rc::new(
rule.source.iter().map(|e| e.prefix(prefix, i, &except)).collect(),
),
target: Rc::new(rule.target.iter()
.map(|e| e.prefix(prefix, i, &except))
.collect()
target: Rc::new(
rule.target.iter().map(|e| e.prefix(prefix, i, &except)).collect(),
),
})
}),
}
}
fn entv_rec(
// level
mod_stack: Substack<Token<String>>,
mod_stack: Substack<Tok<String>>,
// object
data: Vec<FileEntry>,
// context
path: &[Token<String>],
path: &[Tok<String>],
ops_cache: &ExportedOpsCache,
i: &Interner
i: &Interner,
) -> Vec<FileEntry> {
data.into_iter().map(|fe| match fe {
FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
mod_stack, mem, path, ops_cache, i
)),
FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
mod_stack, mem, path, ops_cache, i
)),
// XXX should [FileEntry::Export] be prefixed?
any => any
}).collect()
data
.into_iter()
.map(|fe| match fe {
FileEntry::Exported(mem) =>
FileEntry::Exported(member_rec(mod_stack, mem, path, ops_cache, i)),
FileEntry::Internal(mem) =>
FileEntry::Internal(member_rec(mod_stack, mem, path, ops_cache, i)),
// XXX should [FileEntry::Export] be prefixed?
any => any,
})
.collect()
}
pub fn prefix(
data: Vec<FileEntry>,
path: &[Token<String>],
path: &[Tok<String>],
ops_cache: &ExportedOpsCache,
i: &Interner
i: &Interner,
) -> Vec<FileEntry> {
entv_rec(Substack::Bottom, data, path, ops_cache, i)
}
}

@@ -1,33 +1,36 @@
use std::{ops::Add, rc::Rc};
use std::ops::Add;
use std::rc::Rc;
use hashbrown::HashMap;
use crate::representations::tree::{Module, ModMember};
use crate::ast::{Rule, Expr};
use crate::interner::{Token, Interner};
use crate::ast::{Expr, Rule};
use crate::interner::{Interner, Sym, Tok};
use crate::representations::tree::{ModMember, Module};
use crate::utils::Substack;
#[derive(Clone, Debug, Default)]
pub struct ProjectExt{
pub struct ProjectExt {
/// Pairs each foreign token to the module it was imported from
pub imports_from: HashMap<Token<String>, Token<Vec<Token<String>>>>,
pub imports_from: HashMap<Tok<String>, Sym>,
/// Pairs each exported token to its original full name.
pub exports: HashMap<Token<String>, Token<Vec<Token<String>>>>,
pub exports: HashMap<Tok<String>, Sym>,
/// All rules defined in this module, exported or not
pub rules: Vec<Rule>,
/// Filename, if known, for error reporting
pub file: Option<Vec<Token<String>>>
pub file: Option<Vec<Tok<String>>>,
}
impl Add for ProjectExt {
type Output = Self;
fn add(mut self, rhs: Self) -> Self::Output {
let ProjectExt{ imports_from, exports, rules, file } = rhs;
let ProjectExt { imports_from, exports, rules, file } = rhs;
self.imports_from.extend(imports_from.into_iter());
self.exports.extend(exports.into_iter());
self.rules.extend(rules.into_iter());
if file.is_some() { self.file = file }
if file.is_some() {
self.file = file
}
self
}
}
@@ -51,10 +54,10 @@ pub fn collect_rules(project: &ProjectTree) -> Vec<Rule> {
}
fn collect_consts_rec(
path: Substack<Token<String>>,
bag: &mut HashMap<Token<Vec<Token<String>>>, Expr>,
path: Substack<Tok<String>>,
bag: &mut HashMap<Sym, Expr>,
module: &ProjectModule,
i: &Interner
i: &Interner,
) {
for (key, entry) in module.items.iter() {
match &entry.member {
@@ -62,26 +65,18 @@ fn collect_consts_rec(
let mut name = path.iter().rev_vec_clone();
name.push(*key);
bag.insert(i.i(&name), expr.clone());
}
ModMember::Sub(module) => {
collect_consts_rec(
path.push(*key),
bag, module, i
)
}
},
ModMember::Sub(module) =>
collect_consts_rec(path.push(*key), bag, module, i),
}
}
}
pub fn collect_consts(project: &ProjectTree, i: &Interner)
-> HashMap<Token<Vec<Token<String>>>, Expr>
{
pub fn collect_consts(
project: &ProjectTree,
i: &Interner,
) -> HashMap<Sym, Expr> {
let mut consts = HashMap::new();
collect_consts_rec(
Substack::Bottom,
&mut consts,
project.0.as_ref(),
i
);
collect_consts_rec(Substack::Bottom, &mut consts, project.0.as_ref(), i);
consts
}
}