forked from Orchid/orchid
Preparation for sharing
- rustfmt
- clippy
- comments
- README
@@ -1,55 +1,67 @@
 use std::iter;
 use std::rc::Rc;
 
-use crate::representations::location::Location;
-use crate::representations::sourcefile::{FileEntry, Member};
-use crate::enum_filter;
-use crate::ast::{Rule, Constant, Expr, Clause};
-use crate::interner::Token;
-
-use super::Entry;
-use super::context::Context;
-use super::expression::xpr_parser;
-use super::import::import_parser;
-use super::lexer::{Lexeme, filter_map_lex};
-
-use chumsky::{Parser, prelude::*};
+use chumsky::prelude::*;
+use chumsky::Parser;
 use itertools::Itertools;
 
-fn rule_parser<'a>(ctx: impl Context + 'a)
--> impl Parser<Entry, Rule, Error = Simple<Entry>> + 'a
-{
-  xpr_parser(ctx.clone()).repeated().at_least(1)
+use super::context::Context;
+use super::decls::{SimpleParser, SimpleRecursive};
+use super::expression::xpr_parser;
+use super::import::import_parser;
+use super::lexer::{filter_map_lex, Lexeme};
+use super::Entry;
+use crate::ast::{Clause, Constant, Expr, Rule};
+use crate::enum_filter;
+use crate::representations::location::Location;
+use crate::representations::sourcefile::{FileEntry, Member, Namespace};
+
+fn rule_parser<'a>(
+  ctx: impl Context + 'a,
+) -> impl SimpleParser<Entry, Rule> + 'a {
+  xpr_parser(ctx.clone())
+    .repeated()
+    .at_least(1)
     .then(filter_map_lex(enum_filter!(Lexeme::Rule)))
     .then(xpr_parser(ctx).repeated().at_least(1))
-    .map(|((s, (prio, _)), t)| Rule{
+    .map(|((s, (prio, _)), t)| Rule {
       source: Rc::new(s),
       prio,
-      target: Rc::new(t)
-    }).labelled("Rule")
+      target: Rc::new(t),
+    })
+    .labelled("Rule")
 }
 
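Note: the rewritten signatures use `SimpleParser` and `SimpleRecursive` from the newly imported `super::decls`, a module this diff does not show. A minimal sketch of what it plausibly contains, assuming a blanket alias trait over chumsky's `Parser` with `Simple` errors (the real module may differ):

```rust
use std::hash::Hash;

use chumsky::prelude::*;
use chumsky::recursive::Recursive;

/// Hypothetical alias: any parser over token `I` yielding `O` with `Simple`
/// errors, so signatures need not repeat the full bound.
pub trait SimpleParser<I: Clone + Eq + Hash, O>:
  Parser<I, O, Error = Simple<I>>
{
}
impl<I: Clone + Eq + Hash, O, T> SimpleParser<I, O> for T where
  T: Parser<I, O, Error = Simple<I>>
{
}

/// Alias for the handle `recursive` passes to its closure; written as
/// `SimpleRecursive<Entry, FileEntry>` below via lifetime elision.
pub type SimpleRecursive<'a, I, O> = Recursive<'a, I, O, Simple<I>>;
```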
-fn const_parser<'a>(ctx: impl Context + 'a)
--> impl Parser<Entry, Constant, Error = Simple<Entry>> + 'a
-{
+fn const_parser<'a>(
+  ctx: impl Context + 'a,
+) -> impl SimpleParser<Entry, Constant> + 'a {
   filter_map_lex(enum_filter!(Lexeme::Name))
     .then_ignore(Lexeme::Const.parser())
     .then(xpr_parser(ctx.clone()).repeated().at_least(1))
-    .map(move |((name, _), value)| Constant{
+    .map(move |((name, _), value)| Constant {
       name,
-      value: if let Ok(ex) = value.iter().exactly_one() { ex.clone() }
-      else {
-        let start = value.first().expect("value cannot be empty")
-          .location.range().expect("all locations in parsed source are known")
+      value: if let Ok(ex) = value.iter().exactly_one() {
+        ex.clone()
+      } else {
+        let start = value
+          .first()
+          .expect("value cannot be empty")
+          .location
+          .range()
+          .expect("all locations in parsed source are known")
           .start;
-        let end = value.last().expect("asserted right above")
-          .location.range().expect("all locations in parsed source are known")
+        let end = value
+          .last()
+          .expect("asserted right above")
+          .location
+          .range()
+          .expect("all locations in parsed source are known")
           .end;
-        Expr{
+        Expr {
           location: Location::Range { file: ctx.file(), range: start..end },
-          value: Clause::S('(', Rc::new(value))
+          value: Clause::S('(', Rc::new(value)),
         }
-      }
+      },
     })
 }
 
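Note: the `value` branch hinges on itertools' `exactly_one`: a single-expression body is used as-is, while a multi-expression body is wrapped in a parenthesized `Clause::S` whose synthesized location spans from the first expression's start to the last one's end. A standalone illustration of the `exactly_one` semantics (not Orchid code):

```rust
use itertools::Itertools;

fn main() {
  // Ok only when the iterator yields exactly one item...
  assert_eq!([5].iter().exactly_one().ok(), Some(&5));
  // ...anything else errs, which is what sends a multi-token constant
  // body down the S-wrapping branch above.
  assert!([1, 2].iter().exactly_one().is_err());
}
```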
@@ -60,56 +72,61 @@ pub fn collect_errors<T, E: chumsky::Error<T>>(e: Vec<E>) -> E {
 }
 
 fn namespace_parser<'a>(
-  line: impl Parser<Entry, FileEntry, Error = Simple<Entry>> + 'a,
-) -> impl Parser<Entry, (Token<String>, Vec<FileEntry>), Error = Simple<Entry>> + 'a {
-  Lexeme::Namespace.parser()
-  .ignore_then(filter_map_lex(enum_filter!(Lexeme::Name)))
-  .then(
-    any().repeated().delimited_by(
-      Lexeme::LP('(').parser(),
-      Lexeme::RP('(').parser()
-    ).try_map(move |body, _| {
-      split_lines(&body)
-        .map(|l| line.parse(l))
-        .collect::<Result<Vec<_>,_>>()
-        .map_err(collect_errors)
-    })
-  ).map(move |((name, _), body)| {
-    (name, body)
-  })
+  line: impl SimpleParser<Entry, FileEntry> + 'a,
+) -> impl SimpleParser<Entry, Namespace> + 'a {
+  Lexeme::Namespace
+    .parser()
+    .ignore_then(filter_map_lex(enum_filter!(Lexeme::Name)))
+    .then(
+      any()
+        .repeated()
+        .delimited_by(Lexeme::LP('(').parser(), Lexeme::RP('(').parser())
+        .try_map(move |body, _| {
+          split_lines(&body)
+            .map(|l| line.parse(l))
+            .collect::<Result<Vec<_>, _>>()
+            .map_err(collect_errors)
+        }),
+    )
+    .map(move |((name, _), body)| Namespace { name, body })
 }
 
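Note: `namespace_parser` works in two passes: it captures every token between the parens with `any().repeated().delimited_by(..)`, then re-parses the captured body line by line inside `try_map`, folding all inner failures into one error via `collect_errors`. A minimal standalone `try_map` example in the same 0.8/0.9-era chumsky style (hypothetical, not from this codebase; the "inner pass" is just a range check):

```rust
use chumsky::prelude::*;

// try_map turns a failed post-processing step into a parse error
// reported at the offending span.
fn small_int() -> impl Parser<char, u8, Error = Simple<char>> {
  text::int::<char, Simple<char>>(10).try_map(|digits, span| {
    digits
      .parse::<u8>()
      .map_err(|e| Simple::custom(span, e.to_string()))
  })
}

fn main() {
  assert_eq!(small_int().parse("42").ok(), Some(42));
  assert!(small_int().parse("999").is_err()); // overflows u8
}
```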
 fn member_parser<'a>(
-  line: impl Parser<Entry, FileEntry, Error = Simple<Entry>> + 'a,
-  ctx: impl Context + 'a
-) -> impl Parser<Entry, Member, Error = Simple<Entry>> + 'a {
+  line: impl SimpleParser<Entry, FileEntry> + 'a,
+  ctx: impl Context + 'a,
+) -> impl SimpleParser<Entry, Member> + 'a {
   choice((
-    namespace_parser(line)
-      .map(|(name, body)| Member::Namespace(name, body)),
+    namespace_parser(line).map(Member::Namespace),
     rule_parser(ctx.clone()).map(Member::Rule),
     const_parser(ctx).map(Member::Constant),
   ))
 }
 
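Note: with `namespace_parser` now returning a `Namespace` struct, the closure collapses to the bare variant constructor, which is what clippy's `redundant_closure` lint asks for. A toy illustration (types are hypothetical stand-ins):

```rust
struct Namespace; // stand-in for the real struct with name and body

enum Member {
  Namespace(Namespace),
}

fn demo(ns: Option<Namespace>) -> Option<Member> {
  // Variant constructors are plain functions, so this replaces
  // .map(|n| Member::Namespace(n)).
  ns.map(Member::Namespace)
}

fn main() {
  assert!(demo(Some(Namespace)).is_some());
}
```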
-pub fn line_parser<'a>(ctx: impl Context + 'a)
--> impl Parser<Entry, FileEntry, Error = Simple<Entry>> + 'a
-{
-  recursive(|line: Recursive<Entry, FileEntry, Simple<Entry>>| {
+pub fn line_parser<'a>(
+  ctx: impl Context + 'a,
+) -> impl SimpleParser<Entry, FileEntry> + 'a {
+  recursive(|line: SimpleRecursive<Entry, FileEntry>| {
     choice((
       // In case the usercode wants to parse doc
-      filter_map_lex(enum_filter!(Lexeme >> FileEntry; Comment)).map(|(ent, _)| ent),
+      filter_map_lex(enum_filter!(Lexeme >> FileEntry; Comment))
+        .map(|(ent, _)| ent),
       // plain old imports
-      Lexeme::Import.parser()
+      Lexeme::Import
+        .parser()
         .ignore_then(import_parser(ctx.clone()).map(FileEntry::Import)),
       Lexeme::Export.parser().ignore_then(choice((
         // token collection
-        Lexeme::NS.parser().ignore_then(
-          filter_map_lex(enum_filter!(Lexeme::Name)).map(|(e, _)| e)
-          .separated_by(Lexeme::Name(ctx.interner().i(",")).parser())
-          .delimited_by(Lexeme::LP('(').parser(), Lexeme::RP('(').parser())
-        ).map(FileEntry::Export),
+        Lexeme::NS
+          .parser()
+          .ignore_then(
+            filter_map_lex(enum_filter!(Lexeme::Name))
+              .map(|(e, _)| e)
+              .separated_by(Lexeme::Name(ctx.interner().i(",")).parser())
+              .delimited_by(Lexeme::LP('(').parser(), Lexeme::RP('(').parser()),
+          )
+          .map(FileEntry::Export),
         // public declaration
-        member_parser(line.clone(), ctx.clone()).map(FileEntry::Exported)
+        member_parser(line.clone(), ctx.clone()).map(FileEntry::Exported),
       ))),
       // This could match almost anything so it has to go last
       member_parser(line, ctx).map(FileEntry::Internal),
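Note: `recursive` ties the knot between lines and namespaces here: a line may contain a namespace whose body is again parsed by `line`, and the order of the `choice` branches matters, with the nearly-universal `member_parser` last. A minimal sketch of the same knot-tying pattern (hypothetical example, not Orchid code):

```rust
use chumsky::prelude::*;

// A "line" is either a leaf 'x' or a parenthesized nested line;
// the parser reports the nesting depth.
fn depth() -> impl Parser<char, usize, Error = Simple<char>> {
  recursive(|inner| {
    inner
      .delimited_by(just('('), just(')'))
      .map(|d: usize| d + 1)
      .or(just('x').to(0))
  })
}

fn main() {
  assert_eq!(depth().parse("((x))").ok(), Some(2));
}
```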
@@ -123,13 +140,13 @@ pub fn split_lines(data: &[Entry]) -> impl Iterator<Item = &[Entry]> {
   let mut finished = false;
   iter::from_fn(move || {
     let mut paren_count = 0;
-    while let Some((i, Entry{ lexeme, .. })) = source.next() {
+    for (i, Entry { lexeme, .. }) in source.by_ref() {
       match lexeme {
         Lexeme::LP(_) => paren_count += 1,
         Lexeme::RP(_) => paren_count -= 1,
         Lexeme::BR if paren_count == 0 => {
           let begin = last_slice;
-          last_slice = i+1;
+          last_slice = i + 1;
           return Some(&data[begin..i]);
         },
         _ => (),
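Note: the `while let Some(..) = source.next()` to `for .. in source.by_ref()` change is the standard fix for clippy's `while_let_on_iterator` lint; `by_ref` loops without moving the iterator, so the next invocation of the `from_fn` closure resumes where this one stopped. Illustration:

```rust
fn main() {
  let mut it = [1, 2, 3, 4].into_iter();
  // Equivalent to `while let Some(x) = it.next()`, minus the lint.
  for x in it.by_ref() {
    if x == 2 {
      break; // mirrors the early `return Some(..)` in split_lines
    }
  }
  // The iterator survives the loop and resumes after the break point.
  assert_eq!(it.next(), Some(3));
}
```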
@@ -138,8 +155,9 @@ pub fn split_lines(data: &[Entry]) -> impl Iterator<Item = &[Entry]> {
     // Include last line even without trailing newline
     if !finished {
       finished = true;
-      return Some(&data[last_slice..])
+      return Some(&data[last_slice..]);
     }
     None
-  }).filter(|s| s.len() > 0)
+  })
+  .filter(|s| !s.is_empty())
 }
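Note: taken together, `split_lines` yields the slices between `BR` lexemes that sit at paren depth zero, emits the final line even without a trailing break, and drops empty segments. A self-contained sketch of the same technique over plain characters, splitting on ';' instead of `BR` (illustrative only):

```rust
use std::iter;

fn split_top_level(data: &str) -> Vec<&str> {
  let mut source = data.char_indices();
  let mut last_slice = 0;
  let mut finished = false;
  iter::from_fn(move || {
    let mut depth = 0i32;
    for (i, c) in source.by_ref() {
      match c {
        '(' => depth += 1,
        ')' => depth -= 1,
        // Only a separator at depth zero ends a segment.
        ';' if depth == 0 => {
          let begin = last_slice;
          last_slice = i + 1;
          return Some(&data[begin..i]);
        },
        _ => (),
      }
    }
    // Emit the trailing segment exactly once.
    if !finished {
      finished = true;
      return Some(&data[last_slice..]);
    }
    None
  })
  .filter(|s| !s.is_empty())
  .collect()
}

fn main() {
  assert_eq!(split_top_level("a;(b;c);d"), vec!["a", "(b;c)", "d"]);
}
```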