Rule execution now runs, no tests tho
@@ -1,13 +1,16 @@
 use chumsky::{self, prelude::*, Parser};
-use crate::{enum_parser, expression::{Clause, Expr, Literal}};
+use mappable_rc::Mrc;
+use crate::enum_parser;
+use crate::expression::{Clause, Expr, Literal};
+use crate::utils::to_mrc_slice;
 
-use super::{lexer::Lexeme};
+use super::lexer::Lexeme;
 
 fn sexpr_parser<P>(
   expr: P
 ) -> impl Parser<Lexeme, Clause, Error = Simple<Lexeme>> + Clone
 where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
-  Lexeme::paren_parser(expr.repeated()).map(|(del, b)| Clause::S(del, b))
+  Lexeme::paren_parser(expr.repeated()).map(|(del, b)| Clause::S(del, to_mrc_slice(b)))
 }
 
 fn lambda_parser<P>(
@@ -28,9 +31,9 @@ where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
   .then_ignore(just(Lexeme::name(".")))
   .then_ignore(enum_parser!(Lexeme::Comment).repeated())
   .then(expr.repeated().at_least(1))
-  .map(|((name, typ), mut body): ((String, Vec<Expr>), Vec<Expr>)| {
+  .map(|((name, typ), body): ((String, Vec<Expr>), Vec<Expr>)| {
     // for ent in &mut body { ent.bind_parameter(&name) };
-    Clause::Lambda(name, typ, body)
+    Clause::Lambda(name, to_mrc_slice(typ), to_mrc_slice(body))
   })
 }
 
@@ -51,13 +54,10 @@ where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
   .then_ignore(just(Lexeme::name(".")))
   .then_ignore(enum_parser!(Lexeme::Comment).repeated())
   .then(expr.repeated().at_least(1))
-  .try_map(|((name, typ), mut body), s| if name == None && typ.is_empty() {
+  .try_map(|((name, typ), body), s| if name.is_none() && typ.is_empty() {
     Err(Simple::custom(s, "Auto without name or type has no effect"))
   } else {
-    // if let Some(n) = &name {
-    //   for ent in &mut body { ent.bind_parameter(n) }
-    // }
-    Ok(Clause::Auto(name, typ, body))
+    Ok(Clause::Auto(name, to_mrc_slice(typ), to_mrc_slice(body)))
   })
 }
 
@@ -71,8 +71,8 @@ fn name_parser() -> impl Parser<Lexeme, Vec<String>, Error = Simple<Lexeme>> + C
 
 fn placeholder_parser() -> impl Parser<Lexeme, String, Error = Simple<Lexeme>> + Clone {
   enum_parser!(Lexeme::Name).try_map(|name, span| {
-    name.strip_prefix("$").map(&str::to_string)
-      .ok_or(Simple::custom(span, "Not a placeholder"))
+    name.strip_prefix('$').map(&str::to_string)
+      .ok_or_else(|| Simple::custom(span, "Not a placeholder"))
   })
 }
 
@@ -83,18 +83,22 @@ pub fn xpr_parser() -> impl Parser<Lexeme, Expr, Error = Simple<Lexeme>> {
     enum_parser!(Lexeme::Comment).repeated()
     .ignore_then(choice((
       enum_parser!(Lexeme >> Literal; Int, Num, Char, Str).map(Clause::Literal),
-      placeholder_parser().map(|n| Clause::Placeh(n, None)),
-      just(Lexeme::name("..."))
-        .ignore_then(placeholder_parser())
+      placeholder_parser().map(|key| Clause::Placeh{key, vec: None}),
+      just(Lexeme::name("...")).to(true)
+        .or(just(Lexeme::name("..")).to(false))
+        .then(placeholder_parser())
         .then(
           just(Lexeme::Type)
             .ignore_then(enum_parser!(Lexeme::Int))
             .or_not().map(Option::unwrap_or_default)
         )
-        .map(|(name, prio)| Clause::Placeh(name, Some(prio.try_into().unwrap()))),
+        .map(|((nonzero, key), prio)| Clause::Placeh{key, vec: Some((
+          prio.try_into().unwrap(),
+          nonzero
+        ))}),
       name_parser().map(|qualified| Clause::Name {
         local: if qualified.len() == 1 {Some(qualified[0].clone())} else {None},
-        qualified
+        qualified: to_mrc_slice(qualified)
       }),
       sexpr_parser(expr.clone()),
      lambda_parser(expr.clone()),
@@ -104,6 +108,6 @@ pub fn xpr_parser() -> impl Parser<Lexeme, Expr, Error = Simple<Lexeme>> {
       just(Lexeme::Type)
         .ignore_then(expr.clone()).or_not()
     )
-    .map(|(val, typ)| Expr(val, typ.map(Box::new)))
+    .map(|(val, typ)| Expr(val, typ.map(Mrc::new)))
   }).labelled("Expression")
-}
+}
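The pattern across these expression-parser hunks is moving the AST's owned collections (`Vec<Expr>`, `Box<Expr>`) into reference-counted slices (`Mrc<[Expr]>`, `Mrc<Expr>`) from the `mappable_rc` crate, so rule execution can share subtrees instead of cloning them. The `Placeh` variant also grows struct syntax, with `vec: Some((priority, nonzero))` apparently recording whether the placeholder was introduced by `...` (must match at least one item) or `..`. The helper `to_mrc_slice` lives in `crate::utils` and is not shown in this diff; a minimal sketch of what it plausibly does, assuming `mappable_rc`'s pointer-projection API:

```rust
use mappable_rc::Mrc;

// Hypothetical reconstruction of crate::utils::to_mrc_slice (the helper is
// not part of this diff): move the Vec behind a mappable refcount, then
// project the pointer down to the contained slice.
fn to_mrc_slice<T>(v: Vec<T>) -> Mrc<[T]> {
  Mrc::map(Mrc::new(v), |v| v.as_slice())
}

fn main() {
  let body: Mrc<[u32]> = to_mrc_slice(vec![1, 2, 3]);
  // Clones only bump a refcount; the backing Vec is never copied.
  let shared = body.clone();
  assert_eq!(&*shared, &[1, 2, 3]);
}
```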
@@ -1,20 +1,22 @@
 use std::iter;
 
 use chumsky::{Parser, prelude::*};
-use crate::{enum_parser, utils::BoxedIter};
+use itertools::Itertools;
+use mappable_rc::Mrc;
+use crate::utils::iter::{box_once, box_flatten, into_boxed_iter, BoxedIterIter};
+use crate::utils::{to_mrc_slice, mrc_derive};
+use crate::{enum_parser, box_chain};
 
 use super::lexer::Lexeme;
 
 #[derive(Debug, Clone)]
 pub struct Import {
-  pub path: Vec<String>,
+  pub path: Mrc<[String]>,
   pub name: Option<String>
 }
 
 /// initialize a BoxedIter<BoxedIter<String>> with a single element.
-fn init_table(name: String) -> BoxedIter<'static, BoxedIter<'static, String>> {
+fn init_table(name: String) -> BoxedIterIter<'static, String> {
   // I'm not at all confident that this is a good approach.
-  Box::new(iter::once(Box::new(iter::once(name)) as BoxedIter<String>))
+  box_once(box_once(name))
 }
 
 /// Parse an import command
@@ -24,7 +26,7 @@ fn init_table(name: String) -> BoxedIter<'static, BoxedIter<'static, String>> {
 /// to go wild. There's a blacklist in [name]
 pub fn import_parser() -> impl Parser<Lexeme, Vec<Import>, Error = Simple<Lexeme>> {
   // TODO: this algorithm isn't cache friendly, copies a lot and is generally pretty bad.
-  recursive(|expr: Recursive<Lexeme, BoxedIter<BoxedIter<String>>, Simple<Lexeme>>| {
+  recursive(|expr: Recursive<Lexeme, BoxedIterIter<String>, Simple<Lexeme>>| {
     enum_parser!(Lexeme::Name)
     .separated_by(just(Lexeme::NS))
     .then(
@@ -34,7 +36,7 @@ pub fn import_parser() -> impl Parser<Lexeme, Vec<Import>, Error = Simple<Lexeme
       expr.clone()
         .separated_by(just(Lexeme::name(",")))
         .delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
-        .map(|v| Box::new(v.into_iter().flatten()) as BoxedIter<BoxedIter<String>>)
+        .map(|v| box_flatten(v.into_iter()))
         .labelled("import group"),
       // Each expr returns a list of imports, flatten those into a common list
       just(Lexeme::name("*")).map(|_| init_table("*".to_string()))
@@ -44,22 +46,23 @@ pub fn import_parser() -> impl Parser<Lexeme, Vec<Import>, Error = Simple<Lexeme
       ))
     ).or_not()
   )
-  .map(|(name, opt_post): (Vec<String>, Option<BoxedIter<BoxedIter<String>>>)| -> BoxedIter<BoxedIter<String>> {
+  .map(|(name, opt_post): (Vec<String>, Option<BoxedIterIter<String>>)| -> BoxedIterIter<String> {
     if let Some(post) = opt_post {
       Box::new(post.map(move |el| {
-        Box::new(name.clone().into_iter().chain(el)) as BoxedIter<String>
-      })) as BoxedIter<BoxedIter<String>>
+        box_chain!(name.clone().into_iter(), el)
+      }))
     } else {
-      Box::new(iter::once(Box::new(name.into_iter()) as BoxedIter<String>))
+      box_once(into_boxed_iter(name))
    }
  })
 }).map(|paths| {
   paths.filter_map(|namespaces| {
-    let mut path: Vec<String> = namespaces.collect();
-    match path.pop()?.as_str() {
-      "*" => Some(Import { path, name: None }),
-      name => Some(Import { path, name: Some(name.to_owned()) })
+    let path = to_mrc_slice(namespaces.collect_vec());
+    let path_prefix = mrc_derive(&path, |p| &p[..p.len() - 1]);
+    match path.last()?.as_str() {
+      "*" => Some(Import { path: path_prefix, name: None }),
+      name => Some(Import { path: path_prefix, name: Some(name.to_owned()) })
     }
   }).collect()
 }).labelled("import")
-}
+}
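The import flattener works in nested boxed iterators, and the commit replaces the noisy `Box::new(...) as BoxedIter<...>` casts with named helpers from `crate::utils::iter` plus a `box_chain!` macro, none of which appear in this diff. A sketch of the shapes they presumably have (all names and signatures here are assumptions):

```rust
use std::iter;

// Assumed aliases: a boxed iterator, and a boxed iterator of boxed iterators.
pub type BoxedIter<'a, T> = Box<dyn Iterator<Item = T> + 'a>;
pub type BoxedIterIter<'a, T> = BoxedIter<'a, BoxedIter<'a, T>>;

// box_once(x): a one-element boxed iterator, so init_table can nest two of them.
pub fn box_once<'a, T: 'a>(t: T) -> BoxedIter<'a, T> {
  Box::new(iter::once(t))
}

// into_boxed_iter(c): box whatever iterator a collection turns into.
pub fn into_boxed_iter<'a, I>(i: I) -> BoxedIter<'a, I::Item>
where I: IntoIterator, I::IntoIter: 'a {
  Box::new(i.into_iter())
}

// box_flatten(i): flatten an iterator of iterators behind one box.
pub fn box_flatten<'a, T, J, I>(i: I) -> BoxedIter<'a, T>
where J: Iterator<Item = T> + 'a, I: Iterator<Item = J> + 'a {
  Box::new(i.flatten())
}

// box_chain!(a, b, ...): chain the arguments and box the result.
macro_rules! box_chain {
  ($fst:expr $(, $rest:expr)* $(,)?) => {{
    let boxed: Box<dyn Iterator<Item = _>> = Box::new($fst$(.chain($rest))*);
    boxed
  }};
}

fn main() {
  let path: BoxedIter<String> = box_chain!(
    box_once("std".to_string()),
    into_boxed_iter(vec!["io".to_string()])
  );
  assert_eq!(path.collect::<Vec<_>>(), ["std", "io"]);
}
```

The rewritten tail of `import_parser` also calls `mrc_derive(&path, |p| &p[..p.len() - 1])`, which reads like a clone-then-`Mrc::map` wrapper: it derives the parent-path slice as a second `Mrc<[String]>` into the same allocation, which is why the destructive `path.pop()` could become `path.last()`.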
@@ -2,7 +2,7 @@ use std::{ops::Range, iter, fmt};
 use ordered_float::NotNan;
 use chumsky::{Parser, prelude::*};
 use std::fmt::Debug;
-use crate::utils::BoxedIter;
+use crate::{utils::{BoxedIter, iter::{box_once, box_flatten}}, box_chain};
 
 use super::{number, string, name, comment};
 
@@ -14,9 +14,10 @@ impl Debug for Entry {
     // f.debug_tuple("Entry").field(&self.0).field(&self.1).finish()
   }
 }
-impl Into<(Lexeme, Range<usize>)> for Entry {
-  fn into(self) -> (Lexeme, Range<usize>) {
-    (self.0, self.1)
+
+impl From<Entry> for (Lexeme, Range<usize>) {
+  fn from(ent: Entry) -> Self {
+    (ent.0, ent.1)
   }
 }
 
@@ -107,13 +108,13 @@ fn paren_parser<'a>(
   lp: char, rp: char
 ) -> impl Parser<char, LexSubres<'a>, Error=Simple<char>> + 'a {
   expr.padded().repeated()
-    .map(|x| Box::new(x.into_iter().flatten()) as LexSubres)
+    .map(|x| box_flatten(x.into_iter()))
     .delimited_by(just(lp), just(rp)).map_with_span(move |b, s| {
-      Box::new(
-        iter::once(Entry(Lexeme::LP(lp), s.start..s.start+1))
-          .chain(b)
-          .chain(iter::once(Entry(Lexeme::RP(lp), s.end-1..s.end)))
-      ) as LexSubres
+      box_chain!(
+        iter::once(Entry(Lexeme::LP(lp), s.start..s.start+1)),
+        b,
+        iter::once(Entry(Lexeme::RP(lp), s.end-1..s.end))
+      )
    })
 }
 
@@ -127,7 +128,7 @@ where T: AsRef<str> + Clone {
     paren_parser(recurse.clone(), '[', ']'),
     paren_parser(recurse.clone(), '{', '}'),
     choice((
-      just("==").padded().to(Lexeme::rule(0f64)),
+      just(":=").padded().to(Lexeme::rule(0f64)),
       just("=").ignore_then(number::float_parser()).then_ignore(just("=>")).map(Lexeme::rule),
       comment::comment_parser().map(Lexeme::Comment),
       just("::").padded().to(Lexeme::NS),
@@ -139,7 +140,7 @@ where T: AsRef<str> + Clone {
       string::char_parser().map(Lexeme::Char),
       string::str_parser().map(Lexeme::Str),
       name::name_parser(&all_ops).map(Lexeme::Name), // includes namespacing
-    )).map_with_span(|lx, span| Box::new(iter::once(Entry(lx, span))) as LexSubres)
+    )).map_with_span(|lx, span| box_once(Entry(lx, span)) as LexSubres)
   ))
 }).separated_by(one_of("\t ").repeated())
 .flatten().collect()
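Beyond reusing `box_once`/`box_chain!` in the paren lexer, this section flips a hand-written `Into` impl into a `From` impl. That is standard guidance (clippy's `from_over_into`): implementing `From<Entry>` for the tuple also provides `Entry: Into<(Lexeme, Range<usize>)>` through the blanket impl in std, which the parser driver below relies on via itertools' `map_into`. A minimal demonstration with stand-in types:

```rust
use std::ops::Range;

// Stand-ins for the real lexer types, just to show the trait direction.
#[derive(Clone, Debug)]
enum Lexeme { Name(String) }
#[derive(Clone, Debug)]
struct Entry(Lexeme, Range<usize>);

// Implementing From gives Into for free via the std blanket impl...
impl From<Entry> for (Lexeme, Range<usize>) {
  fn from(ent: Entry) -> Self {
    (ent.0, ent.1)
  }
}

fn main() {
  let ent = Entry(Lexeme::Name("foo".to_string()), 0..3);
  // ...so both directions are available from the single impl:
  let a: (Lexeme, Range<usize>) = ent.clone().into();
  let b = <(Lexeme, Range<usize>)>::from(ent);
  println!("{:?} / {:?}", a, b);
}
```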
@@ -3,11 +3,11 @@ use chumsky::{self, prelude::*, Parser};
 /// Matches any one of the passed operators, longest-first
 fn op_parser<'a, T: AsRef<str> + Clone>(ops: &[T]) -> BoxedParser<'a, char, String, Simple<char>> {
   let mut sorted_ops: Vec<String> = ops.iter().map(|t| t.as_ref().to_string()).collect();
-  sorted_ops.sort_by(|a, b| b.len().cmp(&a.len()));
+  sorted_ops.sort_by_key(|op| -(op.len() as i64));
   sorted_ops.into_iter()
     .map(|op| just(op).boxed())
     .reduce(|a, b| a.or(b).boxed())
-    .unwrap_or(empty().map(|()| panic!("Empty isn't meant to match")).boxed())
+    .unwrap_or_else(|| empty().map(|()| panic!("Empty isn't meant to match")).boxed())
     .labelled("operator").boxed()
 }
 
@@ -56,4 +56,4 @@ pub fn is_op<T: AsRef<str>>(s: T) -> bool {
     Some(x) => !x.is_alphanumeric(),
     None => false
   }
-}
+}
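Both versions of `op_parser` sort the operator list longest-first so that the alternation tries `==` before `=`; `sort_by_key(|op| -(op.len() as i64))` states the descending key directly instead of a flipped comparator. For reference, `std::cmp::Reverse` expresses the same ordering without the signed cast (not what the commit uses, just the common alternative):

```rust
use std::cmp::Reverse;

fn main() {
  let mut ops = vec!["=", "==", ":=", "=>"];
  // Equivalent to the diff's sort_by_key(|op| -(op.len() as i64)):
  // descending length, stable among equal lengths.
  ops.sort_by_key(|op| Reverse(op.len()));
  assert_eq!(ops, ["==", ":=", "=>", "="]);
}
```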
@@ -14,7 +14,7 @@ fn separated_digits_parser(base: u32) -> impl Parser<char, String, Error = Simpl
   just('_')
     .ignore_then(text::digits(base))
     .repeated()
-    .map(|sv| sv.iter().map(|s| s.chars()).flatten().collect())
+    .map(|sv| sv.iter().flat_map(|s| s.chars()).collect())
 }
 
 /// parse a grouped uint
@@ -31,7 +31,7 @@ fn uint_parser(base: u32) -> impl Parser<char, u64, Error = Simple<char>> {
 /// parse exponent notation, or return 0 as the default exponent.
 /// The exponent is always in decimal.
 fn pow_parser() -> impl Parser<char, i32, Error = Simple<char>> {
-  return choice((
+  choice((
     just('p')
       .ignore_then(text::int(10))
       .map(|s: String| s.parse().unwrap()),
@@ -45,15 +45,15 @@ fn pow_parser() -> impl Parser<char, i32, Error = Simple<char>> {
 ///
 /// TODO it panics if it finds a negative exponent
 fn nat2u(base: u64) -> impl Fn((u64, i32),) -> u64 {
-  return move |(val, exp)| {
+  move |(val, exp)| {
     if exp == 0 {val}
     else {val * base.checked_pow(exp.try_into().unwrap()).unwrap()}
-  };
+  }
 }
 
 /// returns a mapper that converts a mantissa and an exponent into a float
 fn nat2f(base: u64) -> impl Fn((NotNan<f64>, i32),) -> NotNan<f64> {
-  return move |(val, exp)| {
+  move |(val, exp)| {
     if exp == 0 {val}
     else {val * (base as f64).powf(exp.try_into().unwrap())}
   }
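`nat2u` and `nat2f` recombine a parsed mantissa with the exponent produced by `pow_parser`, so `123p2` in base 10 reads as 123·10²; dropping `return ...;` in favor of a tail expression is purely stylistic. A standalone check of the integer version's arithmetic, copied from the hunk above (the negative-exponent panic flagged in the TODO carries over):

```rust
fn nat2u(base: u64) -> impl Fn((u64, i32)) -> u64 {
  move |(val, exp)| {
    if exp == 0 {val}
    else {val * base.checked_pow(exp.try_into().unwrap()).unwrap()}
  }
}

fn main() {
  assert_eq!(nat2u(10)((123, 2)), 12_300); // 123 * 10^2
  assert_eq!(nat2u(16)((0xff, 1)), 0xff0); // one hex digit shift
  // nat2u(10)((1, -1)) would panic converting exp to u32,
  // exactly the TODO noted above.
}
```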
@@ -4,7 +4,7 @@ use chumsky::{prelude::{Simple, end}, Stream, Parser};
 use itertools::Itertools;
 use thiserror::Error;
 
-use crate::expression::Rule;
+use crate::{expression::Rule, parse::lexer::LexedText};
 
 use super::{Lexeme, FileEntry, lexer, line_parser, LexerEntry};
 
@@ -24,14 +24,17 @@ where
   S: Into<Stream<'a, char, Range<usize>, Iter>> {
   let lexed = lexer(ops).parse(stream).map_err(ParseError::Lex)?;
   println!("Lexed:\n{:?}", lexed);
+  let LexedText(token_batchv) = lexed;
   let parsr = line_parser().then_ignore(end());
-  let (parsed_lines, errors_per_line) = lexed.0.into_iter().filter_map(|v| {
+  let (parsed_lines, errors_per_line) = token_batchv.into_iter().filter(|v| {
+    !v.is_empty()
+  }).map(|v| {
     // Find the first invalid position for Stream::for_iter
     let LexerEntry(_, Range{ end, .. }) = v.last().unwrap().clone();
     // Stream expects tuples, lexer outputs structs
     let tuples = v.into_iter().map_into::<(Lexeme, Range<usize>)>();
-    Some(parsr.parse(Stream::from_iter(end..end+1, tuples)))
-    // ^^^^^^^^^^
+    parsr.parse(Stream::from_iter(end..end+1, tuples))
+    // ^^^^^^^^^^
     // I haven't the foggiest idea why this is needed, parsers are supposed to be lazy so the
     // end of input should make little difference
   }).map(|res| match res {
@@ -39,13 +42,13 @@ where
     Err(e) => (None, e)
   }).unzip::<_, _, Vec<_>, Vec<_>>();
   let total_err = errors_per_line.into_iter()
-    .map(Vec::into_iter).flatten()
+    .flat_map(Vec::into_iter)
     .collect::<Vec<_>>();
-  if total_err.len() > 0 { Err(ParseError::Ast(total_err)) }
+  if !total_err.is_empty() { Err(ParseError::Ast(total_err)) }
   else { Ok(parsed_lines.into_iter().map(Option::unwrap).collect()) }
 }
 
-pub fn reparse<'a, Iter, S, Op>(ops: &[Op], stream: S, pre: &Vec<FileEntry>)
+pub fn reparse<'a, Iter, S, Op>(ops: &[Op], stream: S, pre: &[FileEntry])
   -> Result<Vec<FileEntry>, ParseError>
 where
   Op: 'a + AsRef<str> + Clone,
@@ -62,4 +65,4 @@ where
   }
   output
 }).collect())
-}
+}
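The driver hands each lexed line to chumsky as a token stream, which explains the dance above: `Stream::from_iter` needs an explicit end-of-input span so errors at end of input have somewhere to point, and the lexer's `LexerEntry` structs must become `(Lexeme, Range<usize>)` tuples first (the `From` impl shown earlier, applied through `map_into`). A reduced illustration of the same pattern with `char` tokens, assuming the chumsky 0.x `Stream` API this code is written against:

```rust
use std::ops::Range;
use chumsky::{prelude::*, Stream};

fn main() {
  // Spanned tokens, like the (Lexeme, Range<usize>) tuples above.
  let tokens: Vec<(char, Range<usize>)> = vec![('a', 0..1), ('b', 1..2)];
  // End-of-input span: just past the last token, as in end..end+1 above.
  let eoi = tokens.last().unwrap().1.end;
  let parser = just::<char, _, Simple<char>>('a')
    .then(just('b'))
    .then_ignore(end());
  let res = parser.parse(Stream::from_iter(eoi..eoi + 1, tokens.into_iter()));
  assert!(res.is_ok());
}
```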
@@ -1,10 +1,10 @@
 use std::collections::HashSet;
-use std::iter;
 
-use crate::enum_parser;
+use crate::{enum_parser, box_chain};
 use crate::expression::{Expr, Clause, Rule};
-use crate::utils::BoxedIter;
+use crate::utils::to_mrc_slice;
 use crate::utils::Stackframe;
+use crate::utils::iter::box_empty;
 
 use super::expression::xpr_parser;
 use super::import;
@@ -24,12 +24,12 @@ pub enum FileEntry {
 fn visit_all_names_clause_recur<'a, F>(
   clause: &'a Clause,
   binds: Stackframe<String>,
-  mut cb: &mut F
-) where F: FnMut(&'a Vec<String>) {
+  cb: &mut F
+) where F: FnMut(&'a [String]) {
   match clause {
     Clause::Auto(name, typ, body) => {
       for x in typ.iter() {
-        visit_all_names_expr_recur(x, binds.clone(), &mut cb)
+        visit_all_names_expr_recur(x, binds.clone(), cb)
       }
       let binds_dup = binds.clone();
       let new_binds = if let Some(n) = name {
@@ -38,25 +38,23 @@ fn visit_all_names_clause_recur<'a, F>(
         binds
       };
       for x in body.iter() {
-        visit_all_names_expr_recur(x, new_binds.clone(), &mut cb)
+        visit_all_names_expr_recur(x, new_binds.clone(), cb)
       }
     },
     Clause::Lambda(name, typ, body) => {
       for x in typ.iter() {
-        visit_all_names_expr_recur(x, binds.clone(), &mut cb)
+        visit_all_names_expr_recur(x, binds.clone(), cb)
       }
       for x in body.iter() {
-        visit_all_names_expr_recur(x, binds.push(name.to_owned()), &mut cb)
+        visit_all_names_expr_recur(x, binds.push(name.to_owned()), cb)
       }
     },
     Clause::S(_, body) => for x in body.iter() {
-      visit_all_names_expr_recur(x, binds.clone(), &mut cb)
+      visit_all_names_expr_recur(x, binds.clone(), cb)
     },
-    Clause::Name{ local, qualified } => {
-      if let Some(name) = local {
-        if binds.iter().all(|x| x != name) {
-          cb(qualified)
-        }
-      }
+    Clause::Name{ local: Some(name), qualified } => {
+      if binds.iter().all(|x| x != name) {
+        cb(qualified)
+      }
     }
     _ => (),
@@ -72,7 +70,7 @@ fn visit_all_names_expr_recur<'a, F>(
   expr: &'a Expr,
   binds: Stackframe<String>,
   cb: &mut F
-) where F: FnMut(&'a Vec<String>) {
+) where F: FnMut(&'a [String]) {
   let Expr(val, typ) = expr;
   visit_all_names_clause_recur(val, binds.clone(), cb);
   if let Some(t) = typ {
@@ -81,10 +79,10 @@ fn visit_all_names_expr_recur<'a, F>(
 }
 
 /// Collect all names that occur in an expression
-fn find_all_names(expr: &Expr) -> HashSet<&Vec<String>> {
+fn find_all_names(expr: &Expr) -> HashSet<&[String]> {
   let mut ret = HashSet::new();
   visit_all_names_expr_recur(expr, Stackframe::new(String::new()), &mut |n| {
-    if !n.last().unwrap().starts_with("$") {
+    if !n.last().unwrap().starts_with('$') {
       ret.insert(n);
     }
   });
@@ -111,19 +109,27 @@ pub fn line_parser() -> impl Parser<Lexeme, FileEntry, Error = Simple<Lexeme>> {
       println!("{:?} could not yield an export", s); e
     })
     .ignore_then(rule_parser())
-    .map(|(source, prio, target)| FileEntry::Rule(Rule{source, prio, target}, true)),
+    .map(|(source, prio, target)| FileEntry::Rule(Rule {
+      source: to_mrc_slice(source),
+      prio,
+      target: to_mrc_slice(target)
+    }, true)),
     // This could match almost anything so it has to go last
-    rule_parser().map(|(source, prio, target)| FileEntry::Rule(Rule{source, prio, target}, false)),
+    rule_parser().map(|(source, prio, target)| FileEntry::Rule(Rule{
+      source: to_mrc_slice(source),
+      prio,
+      target: to_mrc_slice(target)
+    }, false)),
   ))
 }
 
 /// Collect all exported names (and a lot of other words) from a file
-pub fn exported_names(src: &Vec<FileEntry>) -> HashSet<&Vec<String>> {
+pub fn exported_names(src: &[FileEntry]) -> HashSet<&[String]> {
   src.iter().flat_map(|ent| match ent {
     FileEntry::Rule(Rule{source, target, ..}, true) =>
-      Box::new(source.iter().chain(target.iter())) as BoxedIter<&Expr>,
-    _ => Box::new(iter::empty())
-  }).map(find_all_names).flatten().collect()
+      box_chain!(source.iter(), target.iter()),
+    _ => box_empty()
+  }).flat_map(find_all_names).collect()
 }
 
 /// Summarize all imports from a file in a single list of qualified names
@@ -135,4 +141,4 @@ where I: Iterator<Item = &'b FileEntry> + 'a {
     FileEntry::Import(impv) => Some(impv.iter()),
     _ => None
   }).flatten()
-}
+}
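Two things change in the name visitor besides the `&Vec<String>` to `&[String]` loosening: the `Clause::Name` arm matches `local: Some(name)` directly instead of an inner `if let`, and the callback parameter goes from `mut cb: &mut F` passed on as `&mut cb` (an `&mut &mut F`, which only type-checks because `FnMut` is also implemented for `&mut F`) to a plain `&mut F` that is implicitly reborrowed at each recursive call. A minimal sketch of that threading pattern, with a toy tree in place of `Clause`:

```rust
// Toy stand-in for the Clause tree, to show the &mut F threading.
enum Tree {
  Leaf(String),
  Node(Vec<Tree>),
}

fn visit<'a, F>(tree: &'a Tree, cb: &mut F)
where F: FnMut(&'a str) {
  match tree {
    Tree::Leaf(name) => cb(name),
    Tree::Node(children) => {
      for child in children {
        // Passing `cb` reborrows the same &mut F on every iteration;
        // the old style built up &mut &mut F layers instead.
        visit(child, cb)
      }
    }
  }
}

fn main() {
  let tree = Tree::Node(vec![Tree::Leaf("a".into()), Tree::Leaf("b".into())]);
  let mut seen = Vec::new();
  visit(&tree, &mut |n| seen.push(n));
  assert_eq!(seen, ["a", "b"]);
}
```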
@@ -13,7 +13,7 @@ fn text_parser(delim: char) -> impl Parser<char, char, Error = Simple<char>> {
   .or(just('r').to('\r'))
   .or(just('t').to('\t'))
   .or(just('u').ignore_then(
-    filter(|c: &char| c.is_digit(16))
+    filter(|c: &char| c.is_ascii_hexdigit())
     .repeated()
     .exactly(4)
     .collect::<String>()
@@ -43,4 +43,4 @@ pub fn str_parser() -> impl Parser<char, String, Error = Simple<char>> {
     .repeated()
   ).then_ignore(just('"'))
   .flatten().collect()
-}
+}
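In the `\uXXXX` escape, `is_digit(16)` and `is_ascii_hexdigit()` accept exactly the same characters; the new name just says what is meant and is the form clippy suggests. The hunk only shows the four hex digits being collected into a `String`; a standalone version of what the escape presumably resolves to downstream:

```rust
// Standalone version of the \uXXXX branch: four hex digits become a char
// (lone surrogates have no char value and yield None).
fn unescape_u(hex: &str) -> Option<char> {
  if hex.len() != 4 || !hex.chars().all(|c| c.is_ascii_hexdigit()) {
    return None;
  }
  char::from_u32(u32::from_str_radix(hex, 16).ok()?)
}

fn main() {
  assert_eq!(unescape_u("0041"), Some('A'));
  assert_eq!(unescape_u("00e9"), Some('é'));
  assert_eq!(unescape_u("d800"), None); // lone surrogate
  assert_eq!(unescape_u("zz00"), None); // not hex
}
```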