Rule execution now runs; no tests yet.

2022-08-19 12:55:02 +02:00
parent 329dea72b7
commit 891d78c112
30 changed files with 925 additions and 560 deletions


@@ -1,20 +1,22 @@
use std::iter;
use chumsky::{Parser, prelude::*};
use crate::{enum_parser, utils::BoxedIter};
use itertools::Itertools;
use mappable_rc::Mrc;
use crate::utils::iter::{box_once, box_flatten, into_boxed_iter, BoxedIterIter};
use crate::utils::{to_mrc_slice, mrc_derive};
use crate::{enum_parser, box_chain};
use super::lexer::Lexeme;
#[derive(Debug, Clone)]
pub struct Import {
pub path: Vec<String>,
pub path: Mrc<[String]>,
pub name: Option<String>
}
/// initialize a BoxedIter<BoxedIter<String>> with a single element.
fn init_table(name: String) -> BoxedIter<'static, BoxedIter<'static, String>> {
fn init_table(name: String) -> BoxedIterIter<'static, String> {
// I'm not at all confident that this is a good approach.
Box::new(iter::once(Box::new(iter::once(name)) as BoxedIter<String>))
box_once(box_once(name))
}
/// Parse an import command
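
The helpers pulled in from crate::utils::iter are not shown in this diff; below is a rough sketch of plausible definitions for BoxedIter, BoxedIterIter, box_once, into_boxed_iter, box_flatten and box_chain!, inferred purely from how they are used in this hunk and the ones that follow. Treat the bodies as assumptions, not the project's actual utils.

use std::iter;

// Boxed dynamic iterators: a trait-object iterator, and an iterator of them.
pub type BoxedIter<'a, T> = Box<dyn Iterator<Item = T> + 'a>;
pub type BoxedIterIter<'a, T> = BoxedIter<'a, BoxedIter<'a, T>>;

// Wrap a single value in a one-element boxed iterator.
pub fn box_once<'a, T: 'a>(t: T) -> BoxedIter<'a, T> {
    Box::new(iter::once(t))
}

// Box any IntoIterator (e.g. a Vec<String>) as a BoxedIter.
pub fn into_boxed_iter<'a, I>(i: I) -> BoxedIter<'a, I::Item>
where I: IntoIterator, I::IntoIter: 'a, I::Item: 'a {
    Box::new(i.into_iter())
}

// Flatten one level of nesting, boxing the result.
pub fn box_flatten<'a, T, J, I>(i: I) -> BoxedIter<'a, T>
where T: 'a, J: Iterator<Item = T> + 'a, I: Iterator<Item = J> + 'a {
    Box::new(i.flatten())
}

// Chain any number of iterators behind a single box (assumed definition).
macro_rules! box_chain {
    ($head:expr $(, $tail:expr)+ $(,)?) => {
        Box::new($head $(.chain($tail))+) as Box<dyn Iterator<Item = _>>
    };
}

fn main() {
    // init_table("foo") in the new code amounts to this:
    let table: BoxedIterIter<'static, String> = box_once(box_once("foo".to_string()));
    assert_eq!(box_flatten(table).collect::<Vec<_>>(), vec!["foo".to_string()]);
    // into_boxed_iter erases the concrete iterator type of a collection:
    let names: Vec<String> = into_boxed_iter(vec!["bar".to_string()]).collect();
    assert_eq!(names, vec!["bar".to_string()]);
    // box_chain! glues iterators together like Iterator::chain:
    let nums: Vec<i32> = box_chain!(1..3, 5..7).collect();
    assert_eq!(nums, vec![1, 2, 5, 6]);
}
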
@@ -24,7 +26,7 @@ fn init_table(name: String) -> BoxedIter<'static, BoxedIter<'static, String>> {
/// to go wild. There's a blacklist in [name]
pub fn import_parser() -> impl Parser<Lexeme, Vec<Import>, Error = Simple<Lexeme>> {
// TODO: this algorithm isn't cache friendly, copies a lot and is generally pretty bad.
recursive(|expr: Recursive<Lexeme, BoxedIter<BoxedIter<String>>, Simple<Lexeme>>| {
recursive(|expr: Recursive<Lexeme, BoxedIterIter<String>, Simple<Lexeme>>| {
enum_parser!(Lexeme::Name)
.separated_by(just(Lexeme::NS))
.then(
@@ -34,7 +36,7 @@ pub fn import_parser() -> impl Parser<Lexeme, Vec<Import>, Error = Simple<Lexeme
expr.clone()
.separated_by(just(Lexeme::name(",")))
.delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
.map(|v| Box::new(v.into_iter().flatten()) as BoxedIter<BoxedIter<String>>)
.map(|v| box_flatten(v.into_iter()))
.labelled("import group"),
// Each expr returns a list of imports, flatten those into a common list
just(Lexeme::name("*")).map(|_| init_table("*".to_string()))
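
For orientation, the group alternative above (together with the prefix chaining in the next hunk) boils down to prepending the shared leading segments onto every branch of the group. A standalone illustration, with the surface syntax foo::(bar, baz::*) assumed rather than taken from this diff:

fn main() {
    // Assumed example: expanding `foo::(bar, baz::*)` into full segment lists.
    let prefix = vec!["foo".to_string()];
    let branches = vec![
        vec!["bar".to_string()],
        vec!["baz".to_string(), "*".to_string()],
    ];
    let expanded: Vec<Vec<String>> = branches
        .into_iter()
        .map(|branch| prefix.iter().cloned().chain(branch).collect())
        .collect();
    assert_eq!(expanded, vec![
        vec!["foo".to_string(), "bar".to_string()],
        vec!["foo".to_string(), "baz".to_string(), "*".to_string()],
    ]);
}
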
@@ -44,22 +46,23 @@ pub fn import_parser() -> impl Parser<Lexeme, Vec<Import>, Error = Simple<Lexeme
))
).or_not()
)
.map(|(name, opt_post): (Vec<String>, Option<BoxedIter<BoxedIter<String>>>)| -> BoxedIter<BoxedIter<String>> {
.map(|(name, opt_post): (Vec<String>, Option<BoxedIterIter<String>>)| -> BoxedIterIter<String> {
if let Some(post) = opt_post {
Box::new(post.map(move |el| {
Box::new(name.clone().into_iter().chain(el)) as BoxedIter<String>
})) as BoxedIter<BoxedIter<String>>
box_chain!(name.clone().into_iter(), el)
}))
} else {
Box::new(iter::once(Box::new(name.into_iter()) as BoxedIter<String>))
box_once(into_boxed_iter(name))
}
})
}).map(|paths| {
paths.filter_map(|namespaces| {
let mut path: Vec<String> = namespaces.collect();
match path.pop()?.as_str() {
"*" => Some(Import { path, name: None }),
name => Some(Import { path, name: Some(name.to_owned()) })
let path = to_mrc_slice(namespaces.collect_vec());
let path_prefix = mrc_derive(&path, |p| &p[..p.len() - 1]);
match path.last()?.as_str() {
"*" => Some(Import { path: path_prefix, name: None }),
name => Some(Import { path: path_prefix, name: Some(name.to_owned()) })
}
}).collect()
}).labelled("import")
}
}
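
The final map now keeps the whole path as Mrc<[String]> and derives the prefix as a view into it via mrc_derive, instead of popping the last segment off a Vec. The split rule itself is unchanged; here is a minimal plain-Vec sketch of it (the Mrc plumbing and to_mrc_slice/mrc_derive are left out, since their definitions are not part of this diff):

struct Import {
    path: Vec<String>,    // leading segments, without the final one
    name: Option<String>, // None encodes a glob import (`*`)
}

fn split_import(segments: Vec<String>) -> Option<Import> {
    // Take the last segment as the imported name, the rest as the path prefix.
    let (last, prefix) = segments.split_last()?;
    match last.as_str() {
        "*" => Some(Import { path: prefix.to_vec(), name: None }),
        name => Some(Import { path: prefix.to_vec(), name: Some(name.to_owned()) }),
    }
}

fn main() {
    let glob = split_import(vec!["std".into(), "num".into(), "*".into()]).unwrap();
    assert_eq!(glob.path, vec!["std".to_string(), "num".to_string()]);
    assert!(glob.name.is_none());
    let named = split_import(vec!["std".into(), "num".into(), "add".into()]).unwrap();
    assert_eq!(named.name.as_deref(), Some("add"));
    assert_eq!(named.path, vec!["std".to_string(), "num".to_string()]);
}
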