bug fixes and performance improvements
This commit is contained in:
15
src/pipeline/error/mod.rs
Normal file
15
src/pipeline/error/mod.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
mod project_error;
|
||||
mod parse_error_with_path;
|
||||
mod unexpected_directory;
|
||||
mod module_not_found;
|
||||
mod not_exported;
|
||||
mod too_many_supers;
|
||||
mod visibility_mismatch;
|
||||
|
||||
pub use project_error::{ErrorPosition, ProjectError};
|
||||
pub use parse_error_with_path::ParseErrorWithPath;
|
||||
pub use unexpected_directory::UnexpectedDirectory;
|
||||
pub use module_not_found::ModuleNotFound;
|
||||
pub use not_exported::NotExported;
|
||||
pub use too_many_supers::TooManySupers;
|
||||
pub use visibility_mismatch::VisibilityMismatch;
|
||||
25
src/pipeline/error/module_not_found.rs
Normal file
25
src/pipeline/error/module_not_found.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
use crate::utils::{BoxedIter, iter::box_once};
|
||||
|
||||
use super::{ProjectError, ErrorPosition};
|
||||
|
||||
/// Error produced when an import refers to a nonexistent module
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct ModuleNotFound {
|
||||
pub file: Vec<String>,
|
||||
pub subpath: Vec<String>
|
||||
}
|
||||
impl ProjectError for ModuleNotFound {
|
||||
fn description(&self) -> &str {
|
||||
"an import refers to a nonexistent module"
|
||||
}
|
||||
fn message(&self) -> String {
|
||||
format!(
|
||||
"module {} in {} was not found",
|
||||
self.subpath.join("::"),
|
||||
self.file.join("/"),
|
||||
)
|
||||
}
|
||||
fn positions(&self) -> BoxedIter<ErrorPosition> {
|
||||
box_once(ErrorPosition::just_file(self.file.clone()))
|
||||
}
|
||||
}
|
||||
36
src/pipeline/error/not_exported.rs
Normal file
36
src/pipeline/error/not_exported.rs
Normal file
@@ -0,0 +1,36 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::{utils::BoxedIter, representations::location::Location};
|
||||
|
||||
use super::{ProjectError, ErrorPosition};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct NotExported {
|
||||
pub file: Vec<String>,
|
||||
pub subpath: Vec<String>,
|
||||
pub referrer_file: Vec<String>,
|
||||
pub referrer_subpath: Vec<String>,
|
||||
}
|
||||
impl ProjectError for NotExported {
|
||||
fn description(&self) -> &str {
|
||||
"An import refers to a symbol that exists but isn't exported"
|
||||
}
|
||||
fn positions(&self) -> BoxedIter<ErrorPosition> {
|
||||
Box::new([
|
||||
ErrorPosition{
|
||||
location: Location::File(Rc::new(self.file.clone())),
|
||||
message: Some(format!(
|
||||
"{} isn't exported",
|
||||
self.subpath.join("::")
|
||||
)),
|
||||
},
|
||||
ErrorPosition{
|
||||
location: Location::File(Rc::new(self.referrer_file.clone())),
|
||||
message: Some(format!(
|
||||
"{} cannot see this symbol",
|
||||
self.referrer_subpath.join("::")
|
||||
)),
|
||||
}
|
||||
].into_iter())
|
||||
}
|
||||
}
|
||||
37
src/pipeline/error/parse_error_with_path.rs
Normal file
37
src/pipeline/error/parse_error_with_path.rs
Normal file
@@ -0,0 +1,37 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::representations::location::Location;
|
||||
use crate::utils::BoxedIter;
|
||||
use crate::parse::ParseError;
|
||||
|
||||
use super::ErrorPosition;
|
||||
use super::ProjectError;
|
||||
|
||||
/// Produced by stages that parse text when it fails.
|
||||
#[derive(Debug)]
|
||||
pub struct ParseErrorWithPath {
|
||||
pub full_source: String,
|
||||
pub path: Vec<String>,
|
||||
pub error: ParseError
|
||||
}
|
||||
impl ProjectError for ParseErrorWithPath {
|
||||
fn description(&self) -> &str {"Failed to parse code"}
|
||||
fn positions(&self) -> BoxedIter<ErrorPosition> {
|
||||
match &self.error {
|
||||
ParseError::Lex(lex) => Box::new(lex.iter().map(|s| ErrorPosition {
|
||||
location: Location::Range {
|
||||
file: Rc::new(self.path.clone()),
|
||||
range: s.span(),
|
||||
},
|
||||
message: Some(s.to_string())
|
||||
})),
|
||||
ParseError::Ast(ast) => Box::new(ast.iter().map(|(_i, s)| ErrorPosition {
|
||||
location: s.found().map(|e| Location::Range {
|
||||
file: Rc::new(self.path.clone()),
|
||||
range: e.range.clone()
|
||||
}).unwrap_or_else(|| Location::File(Rc::new(self.path.clone()))),
|
||||
message: Some(s.label().unwrap_or("Parse error").to_string())
|
||||
})),
|
||||
}
|
||||
}
|
||||
}
|
||||
50
src/pipeline/error/project_error.rs
Normal file
50
src/pipeline/error/project_error.rs
Normal file
@@ -0,0 +1,50 @@
|
||||
use std::fmt::{Debug, Display};
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::representations::location::Location;
|
||||
use crate::utils::BoxedIter;
|
||||
|
||||
/// A point of interest in resolving the error, such as the point where
|
||||
/// processing got stuck, a command that is likely to be incorrect
|
||||
pub struct ErrorPosition {
|
||||
pub location: Location,
|
||||
pub message: Option<String>
|
||||
}
|
||||
|
||||
impl ErrorPosition {
|
||||
/// An error position referring to an entire file with no comment
|
||||
pub fn just_file(file: Vec<String>) -> Self {
|
||||
Self { message: None, location: Location::File(Rc::new(file)) }
|
||||
}
|
||||
}
|
||||
|
||||
/// Errors addressed to the developer which are to be resolved with
|
||||
/// code changes
|
||||
pub trait ProjectError: Debug {
|
||||
/// A general description of this type of error
|
||||
fn description(&self) -> &str;
|
||||
/// A formatted message that includes specific parameters
|
||||
fn message(&self) -> String {String::new()}
|
||||
/// Code positions relevant to this error
|
||||
fn positions(&self) -> BoxedIter<ErrorPosition>;
|
||||
/// Convert the error into an [Rc<dyn ProjectError>] to be able to
|
||||
/// handle various errors together
|
||||
fn rc(self) -> Rc<dyn ProjectError> where Self: Sized + 'static {
|
||||
Rc::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for dyn ProjectError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let description = self.description();
|
||||
let message = self.message();
|
||||
let positions = self.positions();
|
||||
write!(f, "Problem with the project: {description}; {message}")?;
|
||||
for ErrorPosition { location, message } in positions {
|
||||
write!(f, "@{location}: {}",
|
||||
message.unwrap_or("location of interest".to_string())
|
||||
)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
38
src/pipeline/error/too_many_supers.rs
Normal file
38
src/pipeline/error/too_many_supers.rs
Normal file
@@ -0,0 +1,38 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::{utils::{BoxedIter, iter::box_once}, representations::location::Location};
|
||||
|
||||
use super::{ProjectError, ErrorPosition};
|
||||
|
||||
/// Error produced when an import path starts with more `super` segments
|
||||
/// than the current module's absolute path
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct TooManySupers {
|
||||
pub path: Vec<String>,
|
||||
pub offender_file: Vec<String>,
|
||||
pub offender_mod: Vec<String>
|
||||
}
|
||||
impl ProjectError for TooManySupers {
|
||||
fn description(&self) -> &str {
|
||||
"an import path starts with more `super` segments than \
|
||||
the current module's absolute path"
|
||||
}
|
||||
fn message(&self) -> String {
|
||||
format!(
|
||||
"path {} in {} contains too many `super` steps.",
|
||||
self.path.join("::"),
|
||||
self.offender_mod.join("::")
|
||||
)
|
||||
}
|
||||
|
||||
fn positions(&self) -> BoxedIter<ErrorPosition> {
|
||||
box_once(ErrorPosition {
|
||||
location: Location::File(Rc::new(self.offender_file.clone())),
|
||||
message: Some(format!(
|
||||
"path {} in {} contains too many `super` steps.",
|
||||
self.path.join("::"),
|
||||
self.offender_mod.join("::")
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
26
src/pipeline/error/unexpected_directory.rs
Normal file
26
src/pipeline/error/unexpected_directory.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use crate::utils::{BoxedIter, iter::box_once};
|
||||
|
||||
use super::ErrorPosition;
|
||||
use super::ProjectError;
|
||||
|
||||
/// Produced when a stage that deals specifically with code encounters
|
||||
/// a path that refers to a directory
|
||||
#[derive(Debug)]
|
||||
pub struct UnexpectedDirectory {
|
||||
pub path: Vec<String>
|
||||
}
|
||||
impl ProjectError for UnexpectedDirectory {
|
||||
fn description(&self) -> &str {
|
||||
"A stage that deals specifically with code encountered a path \
|
||||
that refers to a directory"
|
||||
}
|
||||
fn positions(&self) -> BoxedIter<ErrorPosition> {
|
||||
box_once(ErrorPosition::just_file(self.path.clone()))
|
||||
}
|
||||
fn message(&self) -> String {
|
||||
format!(
|
||||
"{} was expected to be a file but a directory was found",
|
||||
self.path.join("/")
|
||||
)
|
||||
}
|
||||
}
|
||||
25
src/pipeline/error/visibility_mismatch.rs
Normal file
25
src/pipeline/error/visibility_mismatch.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
use std::rc::Rc;
|
||||
use crate::representations::location::Location;
|
||||
use crate::utils::{BoxedIter, iter::box_once};
|
||||
|
||||
use super::project_error::{ProjectError, ErrorPosition};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct VisibilityMismatch{
|
||||
pub namespace: Vec<String>,
|
||||
pub file: Rc<Vec<String>>
|
||||
}
|
||||
impl ProjectError for VisibilityMismatch {
|
||||
fn description(&self) -> &str {
|
||||
"Some occurences of a namespace are exported but others are not"
|
||||
}
|
||||
fn positions(&self) -> BoxedIter<ErrorPosition> {
|
||||
box_once(ErrorPosition {
|
||||
location: Location::File(self.file.clone()),
|
||||
message: Some(format!(
|
||||
"{} is opened multiple times with different visibilities",
|
||||
self.namespace.join("::")
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
106
src/pipeline/file_loader.rs
Normal file
106
src/pipeline/file_loader.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
use std::path::PathBuf;
|
||||
use std::io;
|
||||
use std::fs;
|
||||
|
||||
use crate::utils::iter::box_once;
|
||||
use crate::utils::{Cache, BoxedIter};
|
||||
use crate::interner::{Interner, Token};
|
||||
use crate::pipeline::error::UnexpectedDirectory;
|
||||
use crate::pipeline::error::{ProjectError, ErrorPosition};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct FileLoadingError{
|
||||
file: io::Error,
|
||||
dir: io::Error,
|
||||
path: Vec<String>
|
||||
}
|
||||
impl ProjectError for FileLoadingError {
|
||||
fn description(&self) -> &str {
|
||||
"Neither a file nor a directory could be read from \
|
||||
the requested path"
|
||||
}
|
||||
fn positions(&self) -> BoxedIter<ErrorPosition> {
|
||||
box_once(ErrorPosition::just_file(self.path.clone()))
|
||||
}
|
||||
fn message(&self) -> String {
|
||||
format!("File: {}\nDirectory: {}", self.file, self.dir)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the result of loading code from a string-tree form such
|
||||
/// as the file system.
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
pub enum Loaded {
|
||||
Code(Rc<String>),
|
||||
Collection(Rc<Vec<String>>),
|
||||
}
|
||||
impl Loaded {
|
||||
pub fn is_code(&self) -> bool {matches!(self, Loaded::Code(_))}
|
||||
}
|
||||
|
||||
pub type IOResult = Result<Loaded, Rc<dyn ProjectError>>;
|
||||
|
||||
pub type FileCache<'a> = Cache<'a, Token<Vec<Token<String>>>, IOResult>;
|
||||
|
||||
/// Load a file from a path expressed in Rust strings, but relative to
|
||||
/// a root expressed as an OS Path.
|
||||
pub fn load_file(root: &Path, path: &[impl AsRef<str>]) -> IOResult {
|
||||
// let os_path = path.into_iter()
|
||||
// .map_into::<OsString>()
|
||||
// .collect::<Vec<_>>();
|
||||
let full_path = path.iter().fold(
|
||||
root.to_owned(),
|
||||
|p, s| p.join(s.as_ref())
|
||||
);
|
||||
let file_path = full_path.with_extension("orc");
|
||||
let file_error = match fs::read_to_string(&file_path) {
|
||||
Ok(string) => return Ok(Loaded::Code(Rc::new(string))),
|
||||
Err(err) => err
|
||||
};
|
||||
let dir = match fs::read_dir(&full_path) {
|
||||
Ok(dir) => dir,
|
||||
Err(dir_error) => {
|
||||
return Err(FileLoadingError {
|
||||
file: file_error,
|
||||
dir: dir_error,
|
||||
path: path.iter()
|
||||
.map(|s| s.as_ref().to_string())
|
||||
.collect(),
|
||||
}.rc())
|
||||
}
|
||||
};
|
||||
let names = dir.filter_map(Result::ok)
|
||||
.filter_map(|ent| {
|
||||
let fname = ent.file_name().into_string().ok()?;
|
||||
let ftyp = ent.metadata().ok()?.file_type();
|
||||
Some(if ftyp.is_dir() {fname} else {
|
||||
fname.strip_suffix(".or")?.to_string()
|
||||
})
|
||||
}).collect();
|
||||
Ok(Loaded::Collection(Rc::new(names)))
|
||||
}
|
||||
|
||||
/// Generates a cached file loader for a directory
|
||||
pub fn mk_cache(root: PathBuf, i: &Interner) -> FileCache {
|
||||
Cache::new(move |token: Token<Vec<Token<String>>>, _this| -> IOResult {
|
||||
let path = i.r(token).iter()
|
||||
.map(|t| i.r(*t).as_str())
|
||||
.collect::<Vec<_>>();
|
||||
load_file(&root, &path)
|
||||
})
|
||||
}
|
||||
|
||||
/// Loads the string contents of a file at the given location.
|
||||
/// If the path points to a directory, raises an error.
|
||||
pub fn load_text(
|
||||
path: Token<Vec<Token<String>>>,
|
||||
load_file: &impl Fn(Token<Vec<Token<String>>>) -> IOResult,
|
||||
i: &Interner
|
||||
) -> Result<Rc<String>, Rc<dyn ProjectError>> {
|
||||
if let Loaded::Code(s) = load_file(path)? {Ok(s)}
|
||||
else {Err(UnexpectedDirectory{
|
||||
path: i.r(path).iter().map(|t| i.r(*t)).cloned().collect()
|
||||
}.rc())}
|
||||
}
|
||||
32
src/pipeline/import_abs_path.rs
Normal file
32
src/pipeline/import_abs_path.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::representations::tree::Module;
|
||||
use crate::representations::sourcefile::absolute_path;
|
||||
use crate::utils::{Substack};
|
||||
use crate::interner::{Token, Interner};
|
||||
|
||||
use super::error::{ProjectError, TooManySupers};
|
||||
|
||||
pub fn import_abs_path(
|
||||
src_path: &[Token<String>],
|
||||
mod_stack: Substack<Token<String>>,
|
||||
module: &Module<impl Clone, impl Clone>,
|
||||
import_path: &[Token<String>],
|
||||
i: &Interner,
|
||||
) -> Result<Vec<Token<String>>, Rc<dyn ProjectError>> {
|
||||
// path of module within file
|
||||
let mod_pathv = mod_stack.iter().rev_vec_clone();
|
||||
// path of module within compilation
|
||||
let abs_pathv = src_path.iter().copied()
|
||||
.chain(mod_pathv.iter().copied())
|
||||
.collect::<Vec<_>>();
|
||||
// preload-target path relative to module
|
||||
// preload-target path within compilation
|
||||
absolute_path(&abs_pathv, import_path, i, &|n| {
|
||||
module.items.contains_key(&n)
|
||||
}).map_err(|_| TooManySupers{
|
||||
path: import_path.iter().map(|t| i.r(*t)).cloned().collect(),
|
||||
offender_file: src_path.iter().map(|t| i.r(*t)).cloned().collect(),
|
||||
offender_mod: mod_pathv.iter().map(|t| i.r(*t)).cloned().collect(),
|
||||
}.rc())
|
||||
}
|
||||
53
src/pipeline/import_resolution/alias_map.rs
Normal file
53
src/pipeline/import_resolution/alias_map.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
use hashbrown::{HashMap, HashSet};
|
||||
|
||||
use std::hash::Hash;
|
||||
|
||||
use crate::interner::Token;
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct AliasMap{
|
||||
pub targets: HashMap<Token<Vec<Token<String>>>, Token<Vec<Token<String>>>>,
|
||||
pub aliases: HashMap<Token<Vec<Token<String>>>, HashSet<Token<Vec<Token<String>>>>>,
|
||||
}
|
||||
impl AliasMap {
|
||||
pub fn new() -> Self {Self::default()}
|
||||
|
||||
pub fn link(&mut self, alias: Token<Vec<Token<String>>>, target: Token<Vec<Token<String>>>) {
|
||||
let prev = self.targets.insert(alias, target);
|
||||
debug_assert!(prev.is_none(), "Alias already has a target");
|
||||
multimap_entry(&mut self.aliases, &target).insert(alias);
|
||||
// Remove aliases of the alias
|
||||
if let Some(alts) = self.aliases.remove(&alias) {
|
||||
for alt in alts {
|
||||
// Assert that this step has always been done in the past
|
||||
debug_assert!(
|
||||
self.aliases.get(&alt)
|
||||
.map(HashSet::is_empty)
|
||||
.unwrap_or(true),
|
||||
"Alias set of alias not empty"
|
||||
);
|
||||
debug_assert!(
|
||||
self.targets.insert(alt, target) == Some(alias),
|
||||
"Name not target of its own alias"
|
||||
);
|
||||
multimap_entry(&mut self.aliases, &target).insert(alt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve(&self, alias: Token<Vec<Token<String>>>) -> Option<Token<Vec<Token<String>>>> {
|
||||
self.targets.get(&alias).copied()
|
||||
}
|
||||
}
|
||||
|
||||
/// find or create the set belonging to the given key in the given
|
||||
/// map-to-set (aka. multimap)
|
||||
fn multimap_entry<'a, K: Eq + Hash + Clone, V>(
|
||||
map: &'a mut HashMap<K, HashSet<V>>,
|
||||
key: &'_ K
|
||||
) -> &'a mut HashSet<V> {
|
||||
map.raw_entry_mut()
|
||||
.from_key(key)
|
||||
.or_insert_with(|| (key.clone(), HashSet::new()))
|
||||
.1
|
||||
}
|
||||
87
src/pipeline/import_resolution/apply_aliases.rs
Normal file
87
src/pipeline/import_resolution/apply_aliases.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
|
||||
use crate::{utils::Substack, interner::{Token, Interner}, pipeline::{ProjectModule, ProjectExt}, representations::tree::{ModEntry, ModMember}, ast::{Rule, Expr}};
|
||||
|
||||
use super::{alias_map::AliasMap, decls::InjectedAsFn};
|
||||
|
||||
fn process_expr(
|
||||
expr: &Expr,
|
||||
alias_map: &AliasMap,
|
||||
injected_as: &impl InjectedAsFn,
|
||||
i: &Interner,
|
||||
) -> Expr {
|
||||
expr.map_names(&|n| {
|
||||
injected_as(&i.r(n)[..]).or_else(|| {
|
||||
alias_map.resolve(n).map(|n| {
|
||||
injected_as(&i.r(n)[..]).unwrap_or(n)
|
||||
})
|
||||
})
|
||||
}).unwrap_or_else(|| expr.clone())
|
||||
}
|
||||
|
||||
// TODO: replace is_injected with injected_as
|
||||
/// Replace all aliases with the name they're originally defined as
|
||||
fn apply_aliases_rec(
|
||||
path: Substack<Token<String>>,
|
||||
module: &ProjectModule,
|
||||
alias_map: &AliasMap,
|
||||
i: &Interner,
|
||||
injected_as: &impl InjectedAsFn,
|
||||
) -> ProjectModule {
|
||||
let items = module.items.iter().map(|(name, ent)| {
|
||||
let ModEntry{ exported, member } = ent;
|
||||
let member = match member {
|
||||
ModMember::Item(expr) => ModMember::Item(
|
||||
process_expr(expr, alias_map, injected_as, i)
|
||||
),
|
||||
ModMember::Sub(module) => {
|
||||
let subpath = path.push(*name);
|
||||
let is_ignored = injected_as(&subpath.iter().rev_vec_clone()).is_some();
|
||||
let new_mod = if is_ignored {module.clone()} else {
|
||||
let module = module.as_ref();
|
||||
Rc::new(apply_aliases_rec(
|
||||
subpath, module,
|
||||
alias_map, i, injected_as
|
||||
))
|
||||
};
|
||||
ModMember::Sub(new_mod)
|
||||
}
|
||||
};
|
||||
(*name, ModEntry{ exported: *exported, member })
|
||||
}).collect::<HashMap<_, _>>();
|
||||
let rules = module.extra.rules.iter().map(|rule| {
|
||||
let Rule{ source, prio, target } = rule;
|
||||
Rule{
|
||||
prio: *prio,
|
||||
source: Rc::new(source.iter()
|
||||
.map(|expr| process_expr(expr, alias_map, injected_as, i))
|
||||
.collect::<Vec<_>>()
|
||||
),
|
||||
target: Rc::new(target.iter()
|
||||
.map(|expr| process_expr(expr, alias_map, injected_as, i))
|
||||
.collect::<Vec<_>>()
|
||||
),
|
||||
}
|
||||
}).collect::<Vec<_>>();
|
||||
ProjectModule{
|
||||
items,
|
||||
imports: module.imports.clone(),
|
||||
extra: ProjectExt{
|
||||
rules,
|
||||
exports: module.extra.exports.clone(),
|
||||
file: module.extra.file.clone(),
|
||||
imports_from: module.extra.imports_from.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_aliases(
|
||||
module: &ProjectModule,
|
||||
alias_map: &AliasMap,
|
||||
i: &Interner,
|
||||
injected_as: &impl InjectedAsFn,
|
||||
) -> ProjectModule {
|
||||
apply_aliases_rec(Substack::Bottom, module, alias_map, i, injected_as)
|
||||
}
|
||||
103
src/pipeline/import_resolution/collect_aliases.rs
Normal file
103
src/pipeline/import_resolution/collect_aliases.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::representations::tree::{WalkErrorKind, ModMember};
|
||||
use crate::pipeline::error::{ProjectError, NotExported};
|
||||
use crate::pipeline::project_tree::{ProjectTree, split_path, ProjectModule};
|
||||
use crate::interner::{Token, Interner};
|
||||
use crate::utils::{Substack, pushed};
|
||||
|
||||
use super::alias_map::AliasMap;
|
||||
use super::decls::InjectedAsFn;
|
||||
|
||||
/// Assert that a module identified by a path can see a given symbol
|
||||
fn assert_visible(
|
||||
source: &[Token<String>], // must point to a file or submodule
|
||||
target: &[Token<String>], // may point to a symbol or module of any kind
|
||||
project: &ProjectTree,
|
||||
i: &Interner
|
||||
) -> Result<(), Rc<dyn ProjectError>> {
|
||||
let (tgt_item, tgt_path) = if let Some(s) = target.split_last() {s}
|
||||
else {return Ok(())};
|
||||
let shared_len = source.iter()
|
||||
.zip(tgt_path.iter())
|
||||
.take_while(|(a, b)| a == b)
|
||||
.count();
|
||||
let shared_root = project.0.walk(&tgt_path[..shared_len], false)
|
||||
.expect("checked in parsing");
|
||||
let direct_parent = shared_root.walk(&tgt_path[shared_len..], true)
|
||||
.map_err(|e| match e.kind {
|
||||
WalkErrorKind::Missing => panic!("checked in parsing"),
|
||||
WalkErrorKind::Private => {
|
||||
let full_path = &tgt_path[..shared_len + e.pos];
|
||||
let (file, sub) = split_path(full_path, &project);
|
||||
let (ref_file, ref_sub) = split_path(source, &project);
|
||||
NotExported{
|
||||
file: i.extern_all(file),
|
||||
subpath: i.extern_all(sub),
|
||||
referrer_file: i.extern_all(ref_file),
|
||||
referrer_subpath: i.extern_all(ref_sub),
|
||||
}.rc()
|
||||
}
|
||||
})?;
|
||||
let tgt_item_exported = direct_parent.extra.exports.contains_key(tgt_item);
|
||||
let target_prefixes_source = shared_len == tgt_path.len()
|
||||
&& source.get(shared_len) == Some(tgt_item);
|
||||
if !tgt_item_exported && !target_prefixes_source {
|
||||
let (file, sub) = split_path(target, &project);
|
||||
let (ref_file, ref_sub) = split_path(source, &project);
|
||||
Err(NotExported{
|
||||
file: i.extern_all(file),
|
||||
subpath: i.extern_all(sub),
|
||||
referrer_file: i.extern_all(ref_file),
|
||||
referrer_subpath: i.extern_all(ref_sub),
|
||||
}.rc())
|
||||
} else {Ok(())}
|
||||
}
|
||||
|
||||
/// Populate target and alias maps from the module tree recursively
|
||||
fn collect_aliases_rec(
|
||||
path: Substack<Token<String>>,
|
||||
module: &ProjectModule,
|
||||
project: &ProjectTree,
|
||||
alias_map: &mut AliasMap,
|
||||
i: &Interner,
|
||||
injected_as: &impl InjectedAsFn,
|
||||
) -> Result<(), Rc<dyn ProjectError>> {
|
||||
// Assume injected module has been alias-resolved
|
||||
let mod_path_v = path.iter().rev_vec_clone();
|
||||
if injected_as(&mod_path_v).is_some() {return Ok(())};
|
||||
for (&name, &target_mod) in module.extra.imports_from.iter() {
|
||||
let target_mod_v = i.r(target_mod);
|
||||
let target_sym_v = pushed(target_mod_v, name);
|
||||
assert_visible(&mod_path_v, &target_sym_v, project, i)?;
|
||||
let sym_path_v = pushed(&mod_path_v, name);
|
||||
let sym_path = i.i(&sym_path_v);
|
||||
let target_sym = i.i(&target_sym_v);
|
||||
alias_map.link(sym_path, target_sym);
|
||||
}
|
||||
for (&name, entry) in module.items.iter() {
|
||||
let submodule = if let ModMember::Sub(s) = &entry.member {
|
||||
s.as_ref()
|
||||
} else {continue};
|
||||
collect_aliases_rec(
|
||||
path.push(name),
|
||||
submodule, project, alias_map,
|
||||
i, injected_as,
|
||||
)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Populate target and alias maps from the module tree
|
||||
pub fn collect_aliases(
|
||||
module: &ProjectModule,
|
||||
project: &ProjectTree,
|
||||
alias_map: &mut AliasMap,
|
||||
i: &Interner,
|
||||
injected_as: &impl InjectedAsFn,
|
||||
) -> Result<(), Rc<dyn ProjectError>> {
|
||||
collect_aliases_rec(
|
||||
Substack::Bottom, module, project, alias_map,
|
||||
i, injected_as
|
||||
)
|
||||
}
|
||||
5
src/pipeline/import_resolution/decls.rs
Normal file
5
src/pipeline/import_resolution/decls.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
use crate::interner::Token;
|
||||
|
||||
pub trait InjectedAsFn = Fn(
|
||||
&[Token<String>]
|
||||
) -> Option<Token<Vec<Token<String>>>>;
|
||||
7
src/pipeline/import_resolution/mod.rs
Normal file
7
src/pipeline/import_resolution/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
mod alias_map;
|
||||
mod collect_aliases;
|
||||
mod apply_aliases;
|
||||
mod resolve_imports;
|
||||
mod decls;
|
||||
|
||||
pub use resolve_imports::resolve_imports;
|
||||
28
src/pipeline/import_resolution/resolve_imports.rs
Normal file
28
src/pipeline/import_resolution/resolve_imports.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::interner::Interner;
|
||||
use crate::pipeline::error::ProjectError;
|
||||
use crate::pipeline::project_tree::ProjectTree;
|
||||
|
||||
|
||||
use super::alias_map::AliasMap;
|
||||
use super::collect_aliases::collect_aliases;
|
||||
use super::apply_aliases::apply_aliases;
|
||||
use super::decls::InjectedAsFn;
|
||||
|
||||
/// Follow import chains to locate the original name of all tokens, then
|
||||
/// replace these aliases with the original names throughout the tree
|
||||
pub fn resolve_imports(
|
||||
project: ProjectTree,
|
||||
i: &Interner,
|
||||
injected_as: &impl InjectedAsFn,
|
||||
) -> Result<ProjectTree, Rc<dyn ProjectError>> {
|
||||
let mut map = AliasMap::new();
|
||||
collect_aliases(
|
||||
project.0.as_ref(),
|
||||
&project, &mut map,
|
||||
i, injected_as
|
||||
)?;
|
||||
let new_mod = apply_aliases(project.0.as_ref(), &map, i, injected_as);
|
||||
Ok(ProjectTree(Rc::new(new_mod)))
|
||||
}
|
||||
19
src/pipeline/mod.rs
Normal file
19
src/pipeline/mod.rs
Normal file
@@ -0,0 +1,19 @@
|
||||
pub mod error;
|
||||
mod project_tree;
|
||||
mod source_loader;
|
||||
mod import_abs_path;
|
||||
mod split_name;
|
||||
mod import_resolution;
|
||||
pub mod file_loader;
|
||||
mod parse_layer;
|
||||
|
||||
pub use parse_layer::parse_layer;
|
||||
pub use project_tree::{
|
||||
ConstTree, ProjectExt, ProjectModule, ProjectTree, from_const_tree,
|
||||
collect_consts, collect_rules,
|
||||
};
|
||||
// pub use file_loader::{Loaded, FileLoadingError, IOResult};
|
||||
// pub use error::{
|
||||
// ErrorPosition, ModuleNotFound, NotExported, ParseErrorWithPath,
|
||||
// ProjectError, TooManySupers, UnexpectedDirectory
|
||||
// };
|
||||
52
src/pipeline/parse_layer.rs
Normal file
52
src/pipeline/parse_layer.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::representations::sourcefile::FileEntry;
|
||||
use crate::interner::{Token, Interner};
|
||||
|
||||
use super::{project_tree, import_resolution};
|
||||
use super::source_loader;
|
||||
use super::file_loader::IOResult;
|
||||
use super::error::ProjectError;
|
||||
use super::ProjectTree;
|
||||
|
||||
/// Using an IO callback, produce a project tree that includes the given
|
||||
/// target symbols or files if they're defined.
|
||||
///
|
||||
/// The environment accessible to the loaded source can be specified with
|
||||
/// a pre-existing tree which will be merged with the loaded data, and a
|
||||
/// prelude which will be prepended to each individual file. Since the
|
||||
/// prelude gets compiled with each file, normally it should be a glob
|
||||
/// import pointing to a module in the environment.
|
||||
pub fn parse_layer<'a>(
|
||||
targets: &[Token<Vec<Token<String>>>],
|
||||
loader: &impl Fn(Token<Vec<Token<String>>>) -> IOResult,
|
||||
environment: &'a ProjectTree,
|
||||
prelude: &[FileEntry],
|
||||
i: &Interner,
|
||||
) -> Result<ProjectTree, Rc<dyn ProjectError>> {
|
||||
// A path is injected if it is walkable in the injected tree
|
||||
let injected_as = |path: &[Token<String>]| {
|
||||
let (item, modpath) = path.split_last()?;
|
||||
let module = environment.0.walk(modpath, false).ok()?;
|
||||
let inj = module.extra.exports.get(item).copied()?;
|
||||
Some(inj)
|
||||
};
|
||||
let injected_names = |path: Token<Vec<Token<String>>>| {
|
||||
let pathv = &i.r(path)[..];
|
||||
let module = environment.0.walk(&pathv, false).ok()?;
|
||||
Some(Rc::new(
|
||||
module.extra.exports.keys().copied().collect()
|
||||
))
|
||||
};
|
||||
let source = source_loader::load_source(
|
||||
targets, i, loader, &|path| injected_as(path).is_some()
|
||||
)?;
|
||||
let tree = project_tree::build_tree(source, i, prelude, &injected_names)?;
|
||||
let sum = ProjectTree(Rc::new(
|
||||
environment.0.as_ref().clone()
|
||||
+ tree.0.as_ref().clone()
|
||||
));
|
||||
let resolvd = import_resolution::resolve_imports(sum, i, &injected_as)?;
|
||||
// Addition among modules favours the left hand side.
|
||||
Ok(resolvd)
|
||||
}
|
||||
52
src/pipeline/project_tree/add_prelude.rs
Normal file
52
src/pipeline/project_tree/add_prelude.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use crate::representations::sourcefile::{Member, FileEntry};
|
||||
use crate::interner::Token;
|
||||
|
||||
fn member_rec(
|
||||
// object
|
||||
member: Member,
|
||||
// context
|
||||
path: &[Token<String>],
|
||||
prelude: &[FileEntry],
|
||||
) -> Member {
|
||||
match member {
|
||||
Member::Namespace(name, body) => {
|
||||
let new_body = entv_rec(
|
||||
body,
|
||||
path,
|
||||
prelude
|
||||
);
|
||||
Member::Namespace(name, new_body)
|
||||
},
|
||||
any => any
|
||||
}
|
||||
}
|
||||
|
||||
fn entv_rec(
|
||||
// object
|
||||
data: Vec<FileEntry>,
|
||||
// context
|
||||
mod_path: &[Token<String>],
|
||||
prelude: &[FileEntry],
|
||||
) -> Vec<FileEntry> {
|
||||
prelude.iter().cloned()
|
||||
.chain(data.into_iter()
|
||||
.map(|ent| match ent {
|
||||
FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
|
||||
mem, mod_path, prelude
|
||||
)),
|
||||
FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
|
||||
mem, mod_path, prelude
|
||||
)),
|
||||
any => any
|
||||
})
|
||||
)
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn add_prelude(
|
||||
data: Vec<FileEntry>,
|
||||
path: &[Token<String>],
|
||||
prelude: &[FileEntry],
|
||||
) -> Vec<FileEntry> {
|
||||
entv_rec(data, path, prelude)
|
||||
}
|
||||
215
src/pipeline/project_tree/build_tree.rs
Normal file
215
src/pipeline/project_tree/build_tree.rs
Normal file
@@ -0,0 +1,215 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
|
||||
use crate::pipeline::error::ProjectError;
|
||||
use crate::interner::{Token, Interner};
|
||||
use crate::utils::iter::{box_once, box_empty};
|
||||
use crate::utils::{Substack, pushed};
|
||||
use crate::ast::{Expr, Constant};
|
||||
use crate::pipeline::source_loader::{LoadedSourceTable, LoadedSource};
|
||||
use crate::representations::tree::{Module, ModMember, ModEntry};
|
||||
use crate::representations::sourcefile::{FileEntry, Member, absolute_path};
|
||||
|
||||
use super::collect_ops::InjectedOperatorsFn;
|
||||
use super::{collect_ops, ProjectTree, ProjectExt};
|
||||
use super::parse_file::parse_file;
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ParsedSource<'a> {
|
||||
path: Vec<Token<String>>,
|
||||
loaded: &'a LoadedSource,
|
||||
parsed: Vec<FileEntry>
|
||||
}
|
||||
|
||||
pub fn split_path<'a>(path: &'a [Token<String>], proj: &'a ProjectTree)
|
||||
-> (&'a [Token<String>], &'a [Token<String>])
|
||||
{
|
||||
let (end, body) = if let Some(s) = path.split_last() {s}
|
||||
else {return (&[], &[])};
|
||||
let mut module = proj.0.walk(body, false).expect("invalid path cannot be split");
|
||||
if let ModMember::Sub(m) = &module.items[end].member {
|
||||
module = m.clone();
|
||||
}
|
||||
let file = module.extra.file.as_ref()
|
||||
.map(|s| &path[..s.len()])
|
||||
.unwrap_or(&path[..]);
|
||||
let subpath = &path[file.len()..];
|
||||
(file, subpath)
|
||||
}
|
||||
|
||||
/// Convert normalized, prefixed source into a module.
///
/// Collects, in order: the flattened import list, the map of foreign
/// names to their source modules, the export table, the rule list,
/// and finally the item map (constants and recursively converted
/// namespaces). `filepath_len` is the number of leading path segments
/// that name the source file, recorded in `extra.file`.
fn source_to_module(
  // level
  path: Substack<Token<String>>,
  preparsed: &Module<impl Clone, impl Clone>,
  // data
  data: Vec<FileEntry>,
  // context
  i: &Interner,
  filepath_len: usize,
) -> Rc<Module<Expr, ProjectExt>> {
  let path_v = path.iter().rev_vec_clone();
  // All imports in this module, flattened into a single list
  let imports = data.iter()
    .filter_map(|ent| if let FileEntry::Import(impv) = ent {
      Some(impv.iter())
    } else {None})
    .flatten()
    .cloned()
    .collect::<Vec<_>>();
  // For each imported name, the absolute path of the module it comes
  // from. Imports were normalized earlier, hence the `expect`s.
  let imports_from = imports.iter()
    .map(|imp| {
      let mut imp_path_v = i.r(imp.path).clone();
      imp_path_v.push(imp.name.expect("imports normalized"));
      let mut abs_path = absolute_path(
        &path_v,
        &imp_path_v,
        i, &|n| preparsed.items.contains_key(&n)
      ).expect("tested in preparsing");
      let name = abs_path.pop().expect("importing the global context");
      (name, i.i(&abs_path))
    })
    .collect::<HashMap<_, _>>();
  // Export table: exported name -> its absolute path
  let exports = data.iter()
    .flat_map(|ent| {
      let mk_ent = |name| (name, i.i(&pushed(&path_v, name)));
      match ent {
        FileEntry::Export(names)
          => Box::new(names.iter().copied().map(mk_ent)),
        FileEntry::Exported(mem) => match mem {
          Member::Constant(constant) => box_once(mk_ent(constant.name)),
          Member::Namespace(name, _) => box_once(mk_ent(*name)),
          Member::Rule(rule) => {
            // An exported rule exports every name in its source
            // pattern that lives directly under this module
            let mut names = Vec::new();
            for e in rule.source.iter() {
              e.visit_names(Substack::Bottom, &mut |n| {
                if let Some([name]) = i.r(n).strip_prefix(&path_v[..]) {
                  names.push((*name, n))
                }
              })
            }
            Box::new(names.into_iter())
          }
        }
        _ => box_empty()
      }
    })
    .collect::<HashMap<_, _>>();
  // All rules defined here, exported or not
  let rules = data.iter()
    .filter_map(|ent| match ent {
      FileEntry::Exported(Member::Rule(rule)) => Some(rule),
      FileEntry::Internal(Member::Rule(rule)) => Some(rule),
      _ => None,
    })
    .cloned()
    .collect::<Vec<_>>();
  // Item map: constants become leaves, namespaces recurse
  let items = data.into_iter()
    .filter_map(|ent| match ent {
      FileEntry::Exported(Member::Namespace(name, body)) => {
        let prep_member = &preparsed.items[&name].member;
        let new_prep = if let ModMember::Sub(s) = prep_member {s.as_ref()}
        else { panic!("preparsed missing a submodule") };
        let module = source_to_module(
          path.push(name),
          new_prep, body, i, filepath_len
        );
        let member = ModMember::Sub(module);
        Some((name, ModEntry{ exported: true, member }))
      }
      FileEntry::Internal(Member::Namespace(name, body)) => {
        let prep_member = &preparsed.items[&name].member;
        let new_prep = if let ModMember::Sub(s) = prep_member {s.as_ref()}
        else { panic!("preparsed missing a submodule") };
        let module = source_to_module(
          path.push(name),
          new_prep, body, i, filepath_len
        );
        let member = ModMember::Sub(module);
        Some((name, ModEntry{ exported: false, member }))
      }
      FileEntry::Exported(Member::Constant(Constant{ name, value })) => {
        let member = ModMember::Item(value);
        Some((name, ModEntry{ exported: true, member }))
      }
      FileEntry::Internal(Member::Constant(Constant{ name, value })) => {
        let member = ModMember::Item(value);
        Some((name, ModEntry{ exported: false, member }))
      }
      _ => None,
    })
    .collect::<HashMap<_, _>>();
  Rc::new(Module {
    imports,
    items,
    extra: ProjectExt {
      imports_from,
      exports,
      rules,
      // the leading `filepath_len` segments name the source file
      file: Some(path_v[..filepath_len].to_vec())
    }
  })
}
|
||||
|
||||
fn files_to_module(
|
||||
path: Substack<Token<String>>,
|
||||
files: &[ParsedSource],
|
||||
i: &Interner
|
||||
) -> Rc<Module<Expr, ProjectExt>> {
|
||||
let lvl = path.len();
|
||||
let path_v = path.iter().rev_vec_clone();
|
||||
if files.len() == 1 && files[0].path.len() == lvl {
|
||||
return source_to_module(
|
||||
path,
|
||||
files[0].loaded.preparsed.0.as_ref(),
|
||||
files[0].parsed.clone(),
|
||||
i, path.len()
|
||||
)
|
||||
}
|
||||
let items = files.group_by(|a, b| a.path[lvl] == b.path[lvl]).into_iter()
|
||||
.map(|files| {
|
||||
let namespace = files[0].path[lvl];
|
||||
let subpath = path.push(namespace);
|
||||
let module = files_to_module(subpath, files, i);
|
||||
let member = ModMember::Sub(module);
|
||||
(namespace, ModEntry{ exported: true, member })
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
let exports = items.keys()
|
||||
.copied()
|
||||
.map(|name| (name, i.i(&pushed(&path_v, name))))
|
||||
.collect();
|
||||
Rc::new(Module{
|
||||
items,
|
||||
imports: vec![],
|
||||
extra: ProjectExt {
|
||||
exports,
|
||||
imports_from: HashMap::new(),
|
||||
rules: vec![], file: None,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse every loaded source file and assemble the results into a
/// single [ProjectTree].
///
/// # Errors
/// Propagates operator-collection and parse errors from [parse_file].
pub fn build_tree<'a>(
  files: LoadedSourceTable,
  i: &Interner,
  prelude: &[FileEntry],
  injected: &impl InjectedOperatorsFn,
) -> Result<ProjectTree, Rc<dyn ProjectError>> {
  let ops_cache = collect_ops::mk_cache(&files, i, injected);
  let mut entries = files.iter()
    .map(|(path, loaded)| Ok((
      i.r(*path),
      loaded,
      parse_file(*path, &files, &ops_cache, i, prelude)?
    )))
    .collect::<Result<Vec<_>, Rc<dyn ProjectError>>>()?;
  // Descending lexicographic order on the path keeps files that share
  // a prefix adjacent, which the grouping in files_to_module relies on
  // (NOTE(review): "similarity, then longest-first" describes the
  // effect of reverse lexicographic comparison — confirm intent)
  entries.sort_unstable_by(|a, b| a.0.cmp(&b.0).reverse());
  let files = entries.into_iter()
    .map(|(path, loaded, parsed)| ParsedSource{
      loaded, parsed,
      path: path.clone()
    })
    .collect::<Vec<_>>();
  Ok(ProjectTree(files_to_module(Substack::Bottom, &files, i)))
}
|
||||
75
src/pipeline/project_tree/collect_ops/exported_ops.rs
Normal file
75
src/pipeline/project_tree/collect_ops/exported_ops.rs
Normal file
@@ -0,0 +1,75 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use hashbrown::HashSet;
|
||||
|
||||
use crate::representations::tree::WalkErrorKind;
|
||||
use crate::pipeline::source_loader::LoadedSourceTable;
|
||||
use crate::pipeline::error::{ProjectError, ModuleNotFound};
|
||||
use crate::interner::{Token, Interner};
|
||||
use crate::utils::Cache;
|
||||
use crate::pipeline::split_name::split_name;
|
||||
|
||||
/// The set of operators exported by a module, or the error found
/// while collecting them
pub type OpsResult = Result<Rc<HashSet<Token<String>>>, Rc<dyn ProjectError>>;
/// Lazy cache from a module's path to its exported operator set
pub type ExportedOpsCache<'a> = Cache<'a, Token<Vec<Token<String>>>, OpsResult>;

/// Callback that supplies the operator set of injected (non-source)
/// modules; returns `None` for paths it does not provide
pub trait InjectedOperatorsFn = Fn(
  Token<Vec<Token<String>>>
) -> Option<Rc<HashSet<Token<String>>>>;
|
||||
|
||||
/// Length of the longest common prefix of two sequences.
fn coprefix<T: Eq>(
  l: impl Iterator<Item = T>,
  r: impl Iterator<Item = T>
) -> usize {
  let mut shared = 0;
  for (a, b) in l.zip(r) {
    if a != b { break }
    shared += 1;
  }
  shared
}
|
||||
|
||||
/// Collect all names exported by the module at the specified path.
///
/// Injected modules take precedence over loaded sources. If the path
/// names a directory rather than a file, the direct children of that
/// directory are reported instead.
///
/// # Errors
/// [ModuleNotFound] if the in-file part of the path does not resolve.
pub fn collect_exported_ops(
  path: Token<Vec<Token<String>>>,
  loaded: &LoadedSourceTable,
  i: &Interner,
  injected: &impl InjectedOperatorsFn
) -> OpsResult {
  // NB: this `i` shadows the interner, but only within the `if`
  if let Some(i) = injected(path) {return Ok(i)}
  let is_file = |n: &[Token<String>]| loaded.contains_key(&i.i(n));
  let path_s = &i.r(path)[..];
  let name_split = split_name(path_s, &is_file);
  let (fpath_v, subpath_v) = if let Some(f) = name_split {f} else {
    // No file prefix: treat the path as a directory and return the
    // next segment of every loaded module path that extends it
    return Ok(Rc::new(loaded.keys().copied()
      .filter_map(|modname| {
        let modname_s = i.r(modname);
        if path_s.len() == coprefix(path_s.iter(), modname_s.iter()) {
          Some(modname_s[path_s.len()])
        } else {None}
      })
      .collect::<HashSet<_>>()
    ))
  };
  let fpath = i.i(fpath_v);
  let preparsed = &loaded[&fpath].preparsed;
  // Walk into the file's module tree along the in-file remainder,
  // ignoring visibility (checked at a later stage)
  let module = preparsed.0.walk(&subpath_v, false)
    .map_err(|walk_err| match walk_err.kind {
      WalkErrorKind::Private => unreachable!("visibility is not being checked here"),
      WalkErrorKind::Missing => ModuleNotFound{
        file: i.extern_vec(fpath),
        // report only the segments successfully walked before failure
        subpath: subpath_v.into_iter()
          .take(walk_err.pos)
          .map(|t| i.r(*t))
          .cloned()
          .collect()
      }.rc(),
    })?;
  // Only exported items count
  Ok(Rc::new(module.items.iter()
    .filter(|(_, v)| v.exported)
    .map(|(k, _)| *k)
    .collect()
  ))
}
|
||||
|
||||
pub fn mk_cache<'a>(
|
||||
loaded: &'a LoadedSourceTable,
|
||||
i: &'a Interner,
|
||||
injected: &'a impl InjectedOperatorsFn,
|
||||
) -> ExportedOpsCache<'a> {
|
||||
Cache::new(|path, _this| collect_exported_ops(path, loaded, i, injected))
|
||||
}
|
||||
8
src/pipeline/project_tree/collect_ops/mod.rs
Normal file
8
src/pipeline/project_tree/collect_ops/mod.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
mod exported_ops;
|
||||
mod ops_for;
|
||||
|
||||
pub use exported_ops::{
|
||||
ExportedOpsCache, OpsResult, InjectedOperatorsFn,
|
||||
collect_exported_ops, mk_cache
|
||||
};
|
||||
pub use ops_for::collect_ops_for;
|
||||
49
src/pipeline/project_tree/collect_ops/ops_for.rs
Normal file
49
src/pipeline/project_tree/collect_ops/ops_for.rs
Normal file
@@ -0,0 +1,49 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use hashbrown::HashSet;
|
||||
|
||||
use crate::parse::is_op;
|
||||
use crate::pipeline::error::ProjectError;
|
||||
use crate::pipeline::source_loader::LoadedSourceTable;
|
||||
use crate::interner::{Token, Interner};
|
||||
use crate::representations::tree::{Module, ModMember};
|
||||
use crate::pipeline::import_abs_path::import_abs_path;
|
||||
|
||||
use super::exported_ops::{ExportedOpsCache, OpsResult};
|
||||
|
||||
/// Collect all operators and names, exported or local, defined in this
|
||||
/// tree.
|
||||
fn tree_all_ops(
|
||||
module: &Module<impl Clone, impl Clone>,
|
||||
ops: &mut HashSet<Token<String>>
|
||||
) {
|
||||
ops.extend(module.items.keys().copied());
|
||||
for ent in module.items.values() {
|
||||
if let ModMember::Sub(m) = &ent.member {
|
||||
tree_all_ops(m.as_ref(), ops);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Collect all names imported in this file
|
||||
pub fn collect_ops_for(
|
||||
file: &[Token<String>],
|
||||
loaded: &LoadedSourceTable,
|
||||
ops_cache: &ExportedOpsCache,
|
||||
i: &Interner
|
||||
) -> OpsResult {
|
||||
let tree = &loaded[&i.i(file)].preparsed.0;
|
||||
let mut ret = HashSet::new();
|
||||
tree_all_ops(tree.as_ref(), &mut ret);
|
||||
tree.visit_all_imports(&mut |modpath, module, import| {
|
||||
if let Some(n) = import.name { ret.insert(n); } else {
|
||||
let path = import_abs_path(
|
||||
&file, modpath, module, &i.r(import.path)[..], i
|
||||
).expect("This error should have been caught during loading");
|
||||
ret.extend(ops_cache.find(&i.i(&path))?.iter().copied());
|
||||
}
|
||||
Ok::<_, Rc<dyn ProjectError>>(())
|
||||
})?;
|
||||
ret.drain_filter(|t| !is_op(i.r(*t)));
|
||||
Ok(Rc::new(ret))
|
||||
}
|
||||
93
src/pipeline/project_tree/const_tree.rs
Normal file
93
src/pipeline/project_tree/const_tree.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use std::{ops::Add, rc::Rc};
|
||||
|
||||
use hashbrown::HashMap;
|
||||
|
||||
use crate::representations::tree::{ModEntry, ModMember, Module};
|
||||
use crate::representations::Primitive;
|
||||
use crate::representations::location::Location;
|
||||
use crate::foreign::ExternFn;
|
||||
use crate::interner::{Token, Interner};
|
||||
use crate::ast::{Expr, Clause};
|
||||
use crate::utils::{Substack, pushed};
|
||||
|
||||
use super::{ProjectModule, ProjectExt, ProjectTree};
|
||||
|
||||
/// A tree of constants: either a leaf expression or a map of named
/// subtrees. Used to describe native constants to be merged into the
/// project tree.
pub enum ConstTree {
  /// Leaf: one constant expression
  Const(Expr),
  /// Branch: named children
  Tree(HashMap<Token<String>, ConstTree>)
}
impl ConstTree {
  /// Wrap an extern (native) function in a leaf node
  pub fn xfn(xfn: impl ExternFn + 'static) -> Self {
    Self::Const(Expr{
      location: Location::Unknown,
      value: Clause::P(Primitive::ExternFn(Box::new(xfn)))
    })
  }
  /// Build a branch node from (name, subtree) pairs
  pub fn tree(
    arr: impl IntoIterator<Item = (Token<String>, Self)>
  ) -> Self {
    Self::Tree(arr.into_iter().collect())
  }
}
|
||||
impl Add for ConstTree {
|
||||
type Output = ConstTree;
|
||||
|
||||
fn add(self, rhs: ConstTree) -> Self::Output {
|
||||
if let (Self::Tree(t1), Self::Tree(mut t2)) = (self, rhs) {
|
||||
let mut product = HashMap::new();
|
||||
for (key, i1) in t1 {
|
||||
if let Some(i2) = t2.remove(&key) {
|
||||
product.insert(key, i1 + i2);
|
||||
} else {
|
||||
product.insert(key, i1);
|
||||
}
|
||||
}
|
||||
product.extend(t2.into_iter());
|
||||
Self::Tree(product)
|
||||
} else {
|
||||
panic!("cannot combine tree and value fields")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Recursive worker for [from_const_tree]: converts one level of the
/// constant tree into a [ProjectModule], descending into subtrees.
fn from_const_tree_rec(
  path: Substack<Token<String>>,
  consts: HashMap<Token<String>, ConstTree>,
  file: &[Token<String>],
  i: &Interner,
) -> ProjectModule {
  let mut items = HashMap::new();
  let path_v = path.iter().rev_vec_clone();
  for (name, item) in consts {
    // Native constants are always exported
    items.insert(name, ModEntry{
      exported: true,
      member: match item {
        ConstTree::Const(c) => ModMember::Item(c),
        ConstTree::Tree(t) => ModMember::Sub(Rc::new(
          from_const_tree_rec(path.push(name), t, file, i)
        )),
      }
    });
  }
  // Every item is exported under its absolute name
  let exports = items.keys()
    .map(|name| (*name, i.i(&pushed(&path_v, *name))))
    .collect();
  Module {
    items,
    imports: vec![],
    extra: ProjectExt {
      exports,
      file: Some(file.to_vec()),
      ..Default::default()
    }
  }
}
|
||||
|
||||
pub fn from_const_tree(
|
||||
consts: HashMap<Token<String>, ConstTree>,
|
||||
file: &[Token<String>],
|
||||
i: &Interner,
|
||||
) -> ProjectTree {
|
||||
let module = from_const_tree_rec(Substack::Bottom, consts, file, i);
|
||||
ProjectTree(Rc::new(module))
|
||||
}
|
||||
38
src/pipeline/project_tree/mod.rs
Normal file
38
src/pipeline/project_tree/mod.rs
Normal file
@@ -0,0 +1,38 @@
|
||||
/* FILE SEPARATION BOUNDARY
|
||||
|
||||
Collect all operators accessible in each file, parse the files with
|
||||
correct tokenization, resolve glob imports, convert expressions to
|
||||
refer to tokens with (local) absolute path, and connect them into a
|
||||
single tree.
|
||||
|
||||
The module checks for imports from missing modules (including submodules).
|
||||
All other errors must be checked later.
|
||||
|
||||
Injection strategy:
|
||||
Return all items of the given module in the injected tree for `injected`
|
||||
The output of this stage is a tree, which can simply be overlaid with
|
||||
the injected tree
|
||||
*/
|
||||
|
||||
mod collect_ops;
|
||||
mod parse_file;
|
||||
mod build_tree;
|
||||
mod normalize_imports;
|
||||
mod prefix;
|
||||
mod tree;
|
||||
mod const_tree;
|
||||
mod add_prelude;
|
||||
|
||||
pub use collect_ops::InjectedOperatorsFn;
|
||||
|
||||
pub use const_tree::{
|
||||
ConstTree, from_const_tree,
|
||||
};
|
||||
|
||||
pub use tree::{
|
||||
ProjectExt, ProjectModule, ProjectTree, collect_consts, collect_rules
|
||||
};
|
||||
|
||||
pub use build_tree::{
|
||||
build_tree, split_path
|
||||
};
|
||||
84
src/pipeline/project_tree/normalize_imports.rs
Normal file
84
src/pipeline/project_tree/normalize_imports.rs
Normal file
@@ -0,0 +1,84 @@
|
||||
use crate::representations::tree::{Module, ModMember};
|
||||
use crate::representations::sourcefile::{Member, FileEntry, Import};
|
||||
use crate::utils::BoxedIter;
|
||||
use crate::utils::{Substack, iter::box_once};
|
||||
use crate::interner::{Interner, Token};
|
||||
use crate::pipeline::import_abs_path::import_abs_path;
|
||||
|
||||
use super::collect_ops::ExportedOpsCache;
|
||||
|
||||
/// Normalize imports within a single member. Only namespaces contain
/// further entries; any other member is returned unchanged.
fn member_rec(
  // level
  mod_stack: Substack<Token<String>>,
  preparsed: &Module<impl Clone, impl Clone>,
  // object
  member: Member,
  // context
  path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Member {
  match member {
    Member::Namespace(name, body) => {
      // Find the matching preparsed submodule to recurse alongside it
      let prepmember = &preparsed.items[&name].member;
      let subprep = if let ModMember::Sub(m) = prepmember {m.clone()}
      else {unreachable!("This name must point to a namespace")};
      let new_body = entv_rec(
        mod_stack.push(name),
        subprep.as_ref(),
        body,
        path, ops_cache, i
      );
      Member::Namespace(name, new_body)
    },
    any => any
  }
}
|
||||
|
||||
/// Normalize the imports of every entry in a file-entry list: glob
/// imports are expanded into one explicit import per name the target
/// module exports, and members are processed recursively through
/// [member_rec].
fn entv_rec(
  // level
  mod_stack: Substack<Token<String>>,
  preparsed: &Module<impl Clone, impl Clone>,
  // object
  data: Vec<FileEntry>,
  // context
  mod_path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Vec<FileEntry> {
  data.into_iter()
    .map(|ent| match ent {
      FileEntry::Import(imps) => FileEntry::Import(imps.into_iter()
        // A glob import (name: None) becomes one named import per
        // export of the target; named imports pass through unchanged
        .flat_map(|import| if let Import{ name: None, path } = import {
          let p = import_abs_path(
            mod_path, mod_stack, preparsed, &i.r(path)[..], i
          ).expect("Should have emerged in preparsing");
          let names = ops_cache.find(&i.i(&p))
            .expect("Should have emerged in second parsing");
          let imports = names.iter()
            .map(move |&n| Import{ name: Some(n), path })
            .collect::<Vec<_>>();
          Box::new(imports.into_iter()) as BoxedIter<Import>
        } else {box_once(import)})
        .collect()
      ),
      FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
        mod_stack, preparsed, mem, mod_path, ops_cache, i
      )),
      FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
        mod_stack, preparsed, mem, mod_path, ops_cache, i
      )),
      any => any
    })
    .collect()
}
|
||||
|
||||
pub fn normalize_imports(
|
||||
preparsed: &Module<impl Clone, impl Clone>,
|
||||
data: Vec<FileEntry>,
|
||||
path: &[Token<String>],
|
||||
ops_cache: &ExportedOpsCache,
|
||||
i: &Interner
|
||||
) -> Vec<FileEntry> {
|
||||
entv_rec(Substack::Bottom, preparsed, data, path, ops_cache, i)
|
||||
}
|
||||
44
src/pipeline/project_tree/parse_file.rs
Normal file
44
src/pipeline/project_tree/parse_file.rs
Normal file
@@ -0,0 +1,44 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::parse;
|
||||
use crate::pipeline::error::ProjectError;
|
||||
use crate::representations::sourcefile::{FileEntry, normalize_namespaces};
|
||||
use crate::pipeline::source_loader::LoadedSourceTable;
|
||||
use crate::interner::{Token, Interner};
|
||||
|
||||
use super::add_prelude::add_prelude;
|
||||
use super::collect_ops::{ExportedOpsCache, collect_ops_for};
|
||||
use super::normalize_imports::normalize_imports;
|
||||
use super::prefix::prefix;
|
||||
|
||||
pub fn parse_file(
|
||||
path: Token<Vec<Token<String>>>,
|
||||
loaded: &LoadedSourceTable,
|
||||
ops_cache: &ExportedOpsCache,
|
||||
i: &Interner,
|
||||
prelude: &[FileEntry],
|
||||
) -> Result<Vec<FileEntry>, Rc<dyn ProjectError>> {
|
||||
let ld = &loaded[&path];
|
||||
// let ops_cache = collect_ops::mk_cache(loaded, i);
|
||||
let ops = collect_ops_for(&i.r(path)[..], loaded, ops_cache, i)?;
|
||||
let ops_vec = ops.iter()
|
||||
.map(|t| i.r(*t))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let ctx = parse::ParsingContext{
|
||||
interner: i,
|
||||
ops: &ops_vec,
|
||||
file: Rc::new(i.extern_vec(path))
|
||||
};
|
||||
let entries = parse::parse(ld.text.as_str(), ctx)
|
||||
.expect("This error should have been caught during loading");
|
||||
let with_prelude = add_prelude(entries, &i.r(path)[..], prelude);
|
||||
let impnormalized = normalize_imports(
|
||||
&ld.preparsed.0, with_prelude, &i.r(path)[..], ops_cache, i
|
||||
);
|
||||
let nsnormalized = normalize_namespaces(
|
||||
Box::new(impnormalized.into_iter()), i
|
||||
).expect("This error should have been caught during preparsing");
|
||||
let prefixed = prefix(nsnormalized, &i.r(path)[..], ops_cache, i);
|
||||
Ok(prefixed)
|
||||
}
|
||||
82
src/pipeline/project_tree/prefix.rs
Normal file
82
src/pipeline/project_tree/prefix.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::ast::{Constant, Rule};
|
||||
use crate::interner::{Token, Interner};
|
||||
use crate::utils::Substack;
|
||||
use crate::representations::sourcefile::{Member, FileEntry};
|
||||
|
||||
use super::collect_ops::ExportedOpsCache;
|
||||
|
||||
/// Prefix every name inside a single member with its absolute path
/// (file path followed by the enclosing namespace stack).
fn member_rec(
  // level
  mod_stack: Substack<Token<String>>,
  // object
  data: Member,
  // context
  path: &[Token<String>],
  ops_cache: &ExportedOpsCache,
  i: &Interner
) -> Member {
  // Exceptions are currently disabled; imported operator names could
  // be excluded from prefixing here in the future.
  let except = |_| false;
  // Absolute prefix for names at this level
  let prefix_v = path.iter().copied()
    .chain(mod_stack.iter().rev_vec_clone().into_iter())
    .collect::<Vec<_>>();
  let prefix = i.i(&prefix_v);
  match data {
    Member::Namespace(name, body) => {
      let new_body = entv_rec(
        mod_stack.push(name),
        body,
        path, ops_cache, i
      );
      Member::Namespace(name, new_body)
    }
    Member::Constant(constant) => Member::Constant(Constant{
      name: constant.name,
      value: constant.value.prefix(prefix, i, &except)
    }),
    // Both sides of a rule are prefixed
    Member::Rule(rule) => Member::Rule(Rule{
      prio: rule.prio,
      source: Rc::new(rule.source.iter()
        .map(|e| e.prefix(prefix, i, &except))
        .collect()
      ),
      target: Rc::new(rule.target.iter()
        .map(|e| e.prefix(prefix, i, &except))
        .collect()
      ),
    })
  }
}
|
||||
|
||||
fn entv_rec(
|
||||
// level
|
||||
mod_stack: Substack<Token<String>>,
|
||||
// object
|
||||
data: Vec<FileEntry>,
|
||||
// context
|
||||
path: &[Token<String>],
|
||||
ops_cache: &ExportedOpsCache,
|
||||
i: &Interner
|
||||
) -> Vec<FileEntry> {
|
||||
data.into_iter().map(|fe| match fe {
|
||||
FileEntry::Exported(mem) => FileEntry::Exported(member_rec(
|
||||
mod_stack, mem, path, ops_cache, i
|
||||
)),
|
||||
FileEntry::Internal(mem) => FileEntry::Internal(member_rec(
|
||||
mod_stack, mem, path, ops_cache, i
|
||||
)),
|
||||
// XXX should [FileEntry::Export] be prefixed?
|
||||
any => any
|
||||
}).collect()
|
||||
}
|
||||
|
||||
pub fn prefix(
|
||||
data: Vec<FileEntry>,
|
||||
path: &[Token<String>],
|
||||
ops_cache: &ExportedOpsCache,
|
||||
i: &Interner
|
||||
) -> Vec<FileEntry> {
|
||||
entv_rec(Substack::Bottom, data, path, ops_cache, i)
|
||||
}
|
||||
87
src/pipeline/project_tree/tree.rs
Normal file
87
src/pipeline/project_tree/tree.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use std::{ops::Add, rc::Rc};
|
||||
|
||||
use hashbrown::HashMap;
|
||||
|
||||
use crate::representations::tree::{Module, ModMember};
|
||||
use crate::ast::{Rule, Expr};
|
||||
use crate::interner::{Token, Interner};
|
||||
use crate::utils::Substack;
|
||||
|
||||
/// Per-module metadata attached to every module of a [ProjectTree]
#[derive(Clone, Debug, Default)]
pub struct ProjectExt{
  /// Pairs each foreign token to the module it was imported from
  pub imports_from: HashMap<Token<String>, Token<Vec<Token<String>>>>,
  /// Pairs each exported token to its original full name.
  pub exports: HashMap<Token<String>, Token<Vec<Token<String>>>>,
  /// All rules defined in this module, exported or not
  pub rules: Vec<Rule>,
  /// Filename, if known, for error reporting
  pub file: Option<Vec<Token<String>>>
}
|
||||
|
||||
impl Add for ProjectExt {
|
||||
type Output = Self;
|
||||
|
||||
fn add(mut self, rhs: Self) -> Self::Output {
|
||||
let ProjectExt{ imports_from, exports, rules, file } = rhs;
|
||||
self.imports_from.extend(imports_from.into_iter());
|
||||
self.exports.extend(exports.into_iter());
|
||||
self.rules.extend(rules.into_iter());
|
||||
if file.is_some() { self.file = file }
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// A module enriched with the project metadata in [ProjectExt]
pub type ProjectModule = Module<Expr, ProjectExt>;
/// The whole project, rooted at its outermost module
pub struct ProjectTree(pub Rc<ProjectModule>);
|
||||
|
||||
fn collect_rules_rec(bag: &mut Vec<Rule>, module: &ProjectModule) {
|
||||
bag.extend(module.extra.rules.iter().cloned());
|
||||
for item in module.items.values() {
|
||||
if let ModMember::Sub(module) = &item.member {
|
||||
collect_rules_rec(bag, module.as_ref());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn collect_rules(project: &ProjectTree) -> Vec<Rule> {
|
||||
let mut rules = Vec::new();
|
||||
collect_rules_rec(&mut rules, project.0.as_ref());
|
||||
rules
|
||||
}
|
||||
|
||||
fn collect_consts_rec(
|
||||
path: Substack<Token<String>>,
|
||||
bag: &mut HashMap<Token<Vec<Token<String>>>, Expr>,
|
||||
module: &ProjectModule,
|
||||
i: &Interner
|
||||
) {
|
||||
for (key, entry) in module.items.iter() {
|
||||
match &entry.member {
|
||||
ModMember::Item(expr) => {
|
||||
let mut name = path.iter().rev_vec_clone();
|
||||
name.push(*key);
|
||||
bag.insert(i.i(&name), expr.clone());
|
||||
}
|
||||
ModMember::Sub(module) => {
|
||||
collect_consts_rec(
|
||||
path.push(*key),
|
||||
bag, module, i
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn collect_consts(project: &ProjectTree, i: &Interner)
|
||||
-> HashMap<Token<Vec<Token<String>>>, Expr>
|
||||
{
|
||||
let mut consts = HashMap::new();
|
||||
collect_consts_rec(
|
||||
Substack::Bottom,
|
||||
&mut consts,
|
||||
project.0.as_ref(),
|
||||
i
|
||||
);
|
||||
consts
|
||||
}
|
||||
82
src/pipeline/source_loader/load_source.rs
Normal file
82
src/pipeline/source_loader/load_source.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use std::iter;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::pipeline::error::ProjectError;
|
||||
use crate::pipeline::import_abs_path::import_abs_path;
|
||||
use crate::pipeline::split_name::split_name;
|
||||
use crate::interner::{Token, Interner};
|
||||
|
||||
use crate::pipeline::file_loader::{Loaded, load_text, IOResult};
|
||||
use super::loaded_source::{LoadedSourceTable, LoadedSource};
|
||||
use super::preparse::preparse;
|
||||
|
||||
/// Load the source at the given path or all within if it's a collection,
|
||||
/// and all sources imported from these.
|
||||
fn load_abs_path_rec(
|
||||
abs_path: Token<Vec<Token<String>>>,
|
||||
table: &mut LoadedSourceTable,
|
||||
i: &Interner,
|
||||
get_source: &impl Fn(Token<Vec<Token<String>>>) -> IOResult,
|
||||
is_injected: &impl Fn(&[Token<String>]) -> bool
|
||||
) -> Result<(), Rc<dyn ProjectError>> {
|
||||
let abs_pathv = i.r(abs_path);
|
||||
// short-circuit if this import is defined externally or already known
|
||||
if is_injected(&abs_pathv) | table.contains_key(&abs_path) {
|
||||
return Ok(())
|
||||
}
|
||||
// try splitting the path to file, swallowing any IO errors
|
||||
let is_file = |p| (get_source)(p).map(|l| l.is_code()).unwrap_or(false);
|
||||
let name_split = split_name(&abs_pathv, &|p| is_file(i.i(p)));
|
||||
let filename = if let Some((f, _)) = name_split {f} else {
|
||||
// If the path could not be split to file, load it as directory
|
||||
let coll = if let Loaded::Collection(c) = (get_source)(abs_path)? {c}
|
||||
// ^^ raise any IO error that was previously swallowed
|
||||
else {panic!("split_name returned None but the path is a file")};
|
||||
// recurse on all files and folders within
|
||||
for item in coll.iter() {
|
||||
let abs_subpath = abs_pathv.iter()
|
||||
.copied()
|
||||
.chain(iter::once(i.i(item)))
|
||||
.collect::<Vec<_>>();
|
||||
load_abs_path_rec(
|
||||
i.i(&abs_subpath), table, i, get_source, is_injected
|
||||
)?
|
||||
}
|
||||
return Ok(());
|
||||
};
|
||||
// otherwise load, preparse and record this file
|
||||
let text = load_text(i.i(filename), &get_source, i)?;
|
||||
let preparsed = preparse(
|
||||
filename.iter().map(|t| i.r(*t)).cloned().collect(),
|
||||
text.as_str(), i
|
||||
)?;
|
||||
table.insert(abs_path, LoadedSource{ text, preparsed: preparsed.clone() });
|
||||
// recurse on all imported modules
|
||||
preparsed.0.visit_all_imports(&mut |modpath, module, import| {
|
||||
let abs_pathv = import_abs_path(
|
||||
&filename, modpath,
|
||||
module, &import.nonglob_path(i), i
|
||||
)?;
|
||||
// recurse on imported module
|
||||
load_abs_path_rec(i.i(&abs_pathv), table, i, get_source, is_injected)
|
||||
})
|
||||
}
|
||||
|
||||
/// Load and preparse all files reachable from the load targets via
|
||||
/// imports that aren't injected.
|
||||
pub fn load_source(
|
||||
targets: &[Token<Vec<Token<String>>>],
|
||||
i: &Interner,
|
||||
get_source: &impl Fn(Token<Vec<Token<String>>>) -> IOResult,
|
||||
is_injected: &impl Fn(&[Token<String>]) -> bool,
|
||||
) -> Result<LoadedSourceTable, Rc<dyn ProjectError>> {
|
||||
let mut table = LoadedSourceTable::new();
|
||||
for target in targets {
|
||||
load_abs_path_rec(
|
||||
*target,
|
||||
&mut table,
|
||||
i, get_source, is_injected
|
||||
)?
|
||||
}
|
||||
Ok(table)
|
||||
}
|
||||
13
src/pipeline/source_loader/loaded_source.rs
Normal file
13
src/pipeline/source_loader/loaded_source.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
use std::{rc::Rc, collections::HashMap};
|
||||
|
||||
use crate::interner::Token;
|
||||
|
||||
use super::preparse::Preparsed;
|
||||
|
||||
/// A source file together with the results of preparsing it
#[derive(Debug)]
pub struct LoadedSource {
  // Raw file text, shared to avoid copies
  pub text: Rc<String>,
  // Import list and name tree found by parsing without operators
  pub preparsed: Preparsed,
}

/// All loaded sources, keyed by their absolute (interned) module path
pub type LoadedSourceTable = HashMap<Token<Vec<Token<String>>>, LoadedSource>;
|
||||
25
src/pipeline/source_loader/mod.rs
Normal file
25
src/pipeline/source_loader/mod.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
/* PULL LOGISTICS BOUNDARY
|
||||
|
||||
Specifying exactly what this module should be doing was an unexpectedly
|
||||
hard challenge. It is intended to encapsulate all pull logistics, but
|
||||
this definition is apparently prone to scope creep.
|
||||
|
||||
Load files, preparse them to obtain a list of imports, follow these.
|
||||
Preparsing also returns the module tree and list of exported synbols
|
||||
for free, which is needed later so the output of preparsing is also
|
||||
attached to the module output.
|
||||
|
||||
The module checks for IO errors, syntax errors, malformed imports and
|
||||
imports from missing files. All other errors must be checked later.
|
||||
|
||||
Injection strategy:
|
||||
see whether names are valid in the injected tree for is_injected
|
||||
*/
|
||||
|
||||
mod load_source;
|
||||
mod loaded_source;
|
||||
mod preparse;
|
||||
|
||||
pub use loaded_source::{LoadedSource, LoadedSourceTable};
|
||||
pub use load_source::load_source;
|
||||
pub use preparse::Preparsed;
|
||||
102
src/pipeline/source_loader/preparse.rs
Normal file
102
src/pipeline/source_loader/preparse.rs
Normal file
@@ -0,0 +1,102 @@
|
||||
use hashbrown::HashMap;
|
||||
use std::hash::Hash;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::ast::Constant;
|
||||
use crate::pipeline::error::{ProjectError, ParseErrorWithPath, VisibilityMismatch};
|
||||
use crate::representations::sourcefile::{normalize_namespaces, Member};
|
||||
use crate::representations::tree::{ModEntry, ModMember};
|
||||
use crate::interner::Interner;
|
||||
use crate::parse::{self, ParsingContext};
|
||||
use crate::representations::{sourcefile::{FileEntry, imports}, tree::Module};
|
||||
|
||||
/// Module tree produced by preparsing: imports plus name stubs,
/// no expressions yet
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Preparsed(pub Rc<Module<(), ()>>);
|
||||
|
||||
/// Add an internal flat name if it does not exist yet
|
||||
fn add_intern<K: Eq + Hash>(
|
||||
map: &mut HashMap<K, ModEntry<(), ()>>, k: K
|
||||
) {
|
||||
let _ = map.try_insert(k, ModEntry {
|
||||
exported: false,
|
||||
member: ModMember::Item(()),
|
||||
});
|
||||
}
|
||||
|
||||
/// Add an exported flat name or export any existing entry
|
||||
fn add_export<K: Eq + Hash>(
|
||||
map: &mut HashMap<K, ModEntry<(), ()>>, k: K
|
||||
) {
|
||||
if let Some(entry) = map.get_mut(&k) {
|
||||
entry.exported = true
|
||||
} else {
|
||||
map.insert(k, ModEntry {
|
||||
exported: true,
|
||||
member: ModMember::Item(()),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert source lines into a preparse module: imports plus a stub
/// entry for every name the file defines.
fn to_module(src: &[FileEntry], i: &Interner) -> Rc<Module<(), ()>>
{
  let imports = imports(src.iter()).cloned().collect::<Vec<_>>();
  // First pass: namespaces become recursively preparsed submodules
  let mut items = src.iter().filter_map(|ent| match ent {
    FileEntry::Internal(Member::Namespace(name, data)) => {
      let member = ModMember::Sub(to_module(data, i));
      let entry = ModEntry{ exported: false, member };
      Some((*name, entry))
    }
    FileEntry::Exported(Member::Namespace(name, data)) => {
      let member = ModMember::Sub(to_module(data, i));
      let entry = ModEntry{ exported: true, member };
      Some((*name, entry))
    }
    _ => None
  }).collect::<HashMap<_, _>>();
  // Second pass: flat names from exports, constants and rule patterns
  for file_entry in src { match file_entry {
    FileEntry::Comment(_) | FileEntry::Import(_)
    | FileEntry::Internal(Member::Namespace(..))
    | FileEntry::Exported(Member::Namespace(..)) => (),
    FileEntry::Export(tokv) => for tok in tokv {
      add_export(&mut items, *tok)
    }
    FileEntry::Internal(Member::Constant(Constant{ name, .. }))
      => add_intern(&mut items, *name),
    FileEntry::Exported(Member::Constant(Constant{ name, .. }))
      => add_export(&mut items, *name),
    FileEntry::Internal(Member::Rule(rule)) => {
      let names = rule.collect_single_names(i);
      for name in names {
        add_intern(&mut items, name)
      }
    }
    FileEntry::Exported(Member::Rule(rule)) => {
      let names = rule.collect_single_names(i);
      for name in names {
        add_export(&mut items, name)
      }
    }
  }}
  Rc::new(Module { imports, items, extra: () })
}
|
||||
|
||||
/// Preparse the module. At this stage, only the imports and
|
||||
/// names defined by the module can be parsed
|
||||
pub fn preparse(file: Vec<String>, source: &str, i: &Interner)
|
||||
-> Result<Preparsed, Rc<dyn ProjectError>> {
|
||||
// Parse with no operators
|
||||
let ctx = ParsingContext::<&str>::new(&[], i, Rc::new(file.clone()));
|
||||
let entries = parse::parse(source, ctx)
|
||||
.map_err(|error| ParseErrorWithPath{
|
||||
full_source: source.to_string(),
|
||||
error,
|
||||
path: file.clone()
|
||||
}.rc())?;
|
||||
let normalized = normalize_namespaces(Box::new(entries.into_iter()), i)
|
||||
.map_err(|ns| VisibilityMismatch{
|
||||
namespace: ns.into_iter().map(|t| i.r(t)).cloned().collect(),
|
||||
file: Rc::new(file.clone())
|
||||
}.rc())?;
|
||||
Ok(Preparsed(to_module(&normalized, i)))
|
||||
}
|
||||
14
src/pipeline/split_name.rs
Normal file
14
src/pipeline/split_name.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
use crate::interner::Token;
|
||||
|
||||
/// Split `path` into the longest prefix accepted by `is_valid` (the
/// filename) and the remaining suffix (the subpath).
///
/// Prefixes are tried longest-first, down to and including the empty
/// prefix; returns `None` only if `is_valid` rejects every one.
pub fn split_name<'a>(
  path: &'a [Token<String>],
  is_valid: &impl Fn(&[Token<String>]) -> bool
) -> Option<(&'a [Token<String>], &'a [Token<String>])> {
  (0..=path.len()).rev()
    .map(|cut| path.split_at(cut))
    .find(|(filename, _)| is_valid(filename))
}
|
||||
Reference in New Issue
Block a user