October commit
- custom parser support and infra
- type-tagging and traits (untested)
- match expressions
16  .vscode/launch.json  vendored  Normal file
@@ -0,0 +1,16 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug",
            "program": "${workspaceFolder}/<executable file>",
            "args": [],
            "cwd": "${workspaceFolder}"
        }
    ]
}
@@ -5,7 +5,7 @@ const folder_view := (path, next) => do{
cps println $ "Contents of " ++ fs::os_print path;
cps entries = async::block_on $ fs::read_dir path;
cps list::enumerate entries
|> list::map (pass \id. pass \name. \is_dir.
|> list::map ((t[id, t[name, is_dir]]) =>
println $ to_string id ++ ": " ++ fs::os_print name ++ if is_dir then "/" else ""
)
|> list::chain;
@@ -17,7 +17,7 @@ const folder_view := (path, next) => do{
|> tuple::pick 0 2;
next parent_path
} else do {
cps subname, is_dir = to_uint choice
let t[subname, is_dir] = to_uint choice
|> (list::get entries)
|> option::unwrap;
let subpath = fs::join_paths path subname;

21  examples/match/main.orc  Normal file
@@ -0,0 +1,21 @@
import std::to_string

const foo := t[option::some "world!", option::none]

const test1 := match foo {
  t[option::some balh, option::none] => balh;
}

const bar := map::new[
  "age" = 22,
  "name" = "lbfalvy",
  "is_alive" = true,
  "species" = "human",
  "greeting" = "Hello"
]

const test2 := match bar {
  map::having ["is_alive" = true, "greeting" = foo] => foo
}

const main := test2 ++ ", " ++ test1
@@ -30,7 +30,7 @@ struct Args {
pub macro_limit: usize,
/// Print the parsed ruleset and exit
#[arg(long)]
pub dump_repo: bool,
pub list_macros: bool,
/// Step through the macro execution process in the specified symbol
#[arg(long, default_value = "")]
pub macro_debug: String,
@@ -112,6 +112,16 @@ pub fn macro_debug(premacro: PreMacro, sym: Sym) -> ExitCode {
"p" | "print" => print_for_debug(&code),
"d" | "dump" => print!("Rules: {}", premacro.repo),
"q" | "quit" => return ExitCode::SUCCESS,
"complete" => {
if let Some((idx, c)) = steps.last() {
code = c;
print!("Step {idx}: ");
print_for_debug(&code);
} else {
print!("Already halted")
}
return ExitCode::SUCCESS;
},
"h" | "help" => print!(
"Available commands:
\t<blank>, n, next\t\ttake a step
@@ -148,7 +158,7 @@ pub fn main() -> ExitCode {
.add_system(io::Service::new(scheduler.clone(), std_streams))
.add_system(directfs::DirectFS::new(scheduler));
let premacro = env.load_dir(&dir, &main).unwrap();
if args.dump_repo {
if args.list_macros {
println!("Parsed rules: {}", premacro.repo);
return ExitCode::SUCCESS;
}
@@ -1,7 +1,7 @@
use std::fmt::Display;
use std::rc::Rc;
use std::sync::Arc;

use crate::foreign::ExternError;
use crate::foreign::{ExternError, XfnResult};
use crate::Location;

/// Some expectation (usually about the argument types of a function) did not
@@ -15,15 +15,15 @@ pub struct AssertionError {
impl AssertionError {
/// Construct, upcast and wrap in a Result that never succeeds for easy
/// short-circuiting
pub fn fail<T>(
location: Location,
message: &'static str,
) -> Result<T, Rc<dyn ExternError>> {
pub fn fail<T>(location: Location, message: &'static str) -> XfnResult<T> {
Err(Self::ext(location, message))
}

/// Construct and upcast to [ExternError]
pub fn ext(location: Location, message: &'static str) -> Rc<dyn ExternError> {
pub fn ext(
location: Location,
message: &'static str,
) -> Arc<dyn ExternError> {
Self { location, message }.into_extern()
}
}
@@ -4,7 +4,6 @@ mod conflicting_roles;
mod import_all;
mod no_targets;
mod not_exported;
mod parse_error_with_tokens;
mod project_error;
mod runtime_error;
mod too_many_supers;
@@ -16,7 +15,6 @@ pub use conflicting_roles::ConflictingRoles;
pub use import_all::ImportAll;
pub use no_targets::NoTargets;
pub use not_exported::NotExported;
pub use parse_error_with_tokens::ParseErrorWithTokens;
pub use project_error::{ErrorPosition, ProjectError, ProjectResult};
pub use runtime_error::RuntimeError;
pub use too_many_supers::TooManySupers;
@@ -1,28 +0,0 @@
use std::rc::Rc;

use itertools::Itertools;

use super::{ErrorPosition, ProjectError};
use crate::parse::Entry;
use crate::utils::BoxedIter;

/// Produced by stages that parse text when it fails.
pub struct ParseErrorWithTokens {
/// The complete source of the faulty file
pub full_source: String,
/// Tokens, if the error did not occur during tokenization
pub tokens: Vec<Entry>,
/// The parse error produced by Chumsky
pub error: Rc<dyn ProjectError>,
}
impl ProjectError for ParseErrorWithTokens {
fn description(&self) -> &str { self.error.description() }
fn message(&self) -> String {
format!(
"Failed to parse code: {}\nTokenized source for context:\n{}",
self.error.message(),
self.tokens.iter().map(|t| t.to_string()).join(" "),
)
}
fn positions(&self) -> BoxedIter<ErrorPosition> { self.error.positions() }
}
@@ -1,7 +1,7 @@
use std::fmt::Display;
use std::rc::Rc;
use std::sync::Arc;

use crate::foreign::ExternError;
use crate::foreign::{ExternError, XfnResult};

/// Some external event prevented the operation from succeeding
#[derive(Clone)]
@@ -13,15 +13,15 @@ pub struct RuntimeError {
impl RuntimeError {
/// Construct, upcast and wrap in a Result that never succeeds for easy
/// short-circuiting
pub fn fail<T>(
message: String,
operation: &'static str,
) -> Result<T, Rc<dyn ExternError>> {
pub fn fail<T>(message: String, operation: &'static str) -> XfnResult<T> {
Err(Self { message, operation }.into_extern())
}

/// Construct and upcast to [ExternError]
pub fn ext(message: String, operation: &'static str) -> Rc<dyn ExternError> {
pub fn ext(
message: String,
operation: &'static str,
) -> Arc<dyn ExternError> {
Self { message, operation }.into_extern()
}
}
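With the tightened signature, calling code can short-circuit through the `XfnResult` alias. A minimal sketch of a caller under that assumption, with paths as they would appear inside the crate (the config-decoding scenario and the `read_config` name are illustrative, not part of this commit):

use crate::error::RuntimeError;
use crate::foreign::XfnResult;

// Hypothetical external helper: decode UTF-8 or report a runtime failure.
fn read_config(bytes: &[u8]) -> XfnResult<String> {
  match std::str::from_utf8(bytes) {
    Ok(s) => Ok(s.to_string()),
    Err(e) => RuntimeError::fail(e.to_string(), "decoding a config file"),
  }
}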
@@ -46,10 +46,12 @@ impl<'a> Environment<'a> {
let mut line_parsers = vec![];
let mut prelude = vec![];
for sys in systems.iter() {
lexer_plugins.extend(sys.lexer_plugins.iter().map(|b| &**b));
line_parsers.extend(sys.line_parsers.iter().map(|b| &**b));
if !sys.code.is_empty() {
tree = parse_layer(
sys.code.keys().map(|sym| &sym[..]),
&|k| sys.load_file(k),
&|k, referrer| sys.load_file(k, referrer),
&tree,
&prelude,
&lexer_plugins,
@@ -57,8 +59,6 @@ impl<'a> Environment<'a> {
i,
)?;
}
lexer_plugins.extend(sys.lexer_plugin.as_deref().iter());
line_parsers.extend(sys.line_parser.as_deref().iter());
prelude.extend_from_slice(&sys.prelude);
}
Ok(CompiledEnv { prelude, tree, systems })
@@ -74,14 +74,14 @@ impl<'a> Environment<'a> {
let CompiledEnv { prelude, systems, tree } = self.compile()?;
let file_cache = file_loader::mk_dir_cache(dir.to_path_buf());
let lexer_plugins = (systems.iter())
.filter_map(|s| s.lexer_plugin.as_deref())
.flat_map(|s| s.lexer_plugins.iter().map(|b| &**b))
.collect::<Vec<_>>();
let line_parsers = (systems.iter())
.filter_map(|s| s.line_parser.as_deref())
.flat_map(|s| s.line_parsers.iter().map(|b| &**b))
.collect::<Vec<_>>();
let vname_tree = parse_layer(
iter::once(target),
&|path| file_cache.find(path),
&|path, _| file_cache.find(path),
&tree,
&prelude,
&lexer_plugins,
@@ -86,7 +86,8 @@ impl<'a> PreMacro<'a> {
} else {
repo.pass(source).unwrap_or_else(|| source.clone())
};
let runtree = ast_to_interpreted(&unmatched).map_err(|e| e.rc())?;
let runtree =
ast_to_interpreted(&unmatched, name.clone()).map_err(|e| e.rc())?;
symbols.insert(name.clone(), runtree);
}
Ok(Process {
@@ -27,10 +27,10 @@ pub struct System<'a> {
/// Custom lexer for the source code representation atomic data.
/// These take priority over builtin lexers so the syntax they
/// match should be unambiguous
pub lexer_plugin: Option<Box<dyn LexerPlugin>>,
pub lexer_plugins: Vec<Box<dyn LexerPlugin>>,
/// Parser that processes custom line types into their representation in the
/// module tree
pub line_parser: Option<Box<dyn LineParser>>,
pub line_parsers: Vec<Box<dyn LineParser>>,
}
impl<'a> System<'a> {
/// Intern the name of the system so that it can be used as an Orchid
@@ -41,10 +41,17 @@ impl<'a> System<'a> {
}

/// Load a file from the system
pub fn load_file(&self, path: &[Tok<String>]) -> IOResult {
pub fn load_file(
&self,
path: &[Tok<String>],
referrer: &[Tok<String>],
) -> IOResult {
(self.code.get(path)).cloned().ok_or_else(|| {
let err =
MissingSystemCode { path: path.to_vec(), system: self.name.clone() };
let err = MissingSystemCode {
path: path.to_vec(),
system: self.name.clone(),
referrer: referrer.to_vec(),
};
err.rc()
})
}
@@ -56,6 +63,7 @@ impl<'a> System<'a> {
pub struct MissingSystemCode {
path: VName,
system: Vec<String>,
referrer: VName,
}
impl ProjectError for MissingSystemCode {
fn description(&self) -> &str {
@@ -63,8 +71,9 @@ impl ProjectError for MissingSystemCode {
}
fn message(&self) -> String {
format!(
"Path {} is not defined by {} or any system before it",
"Path {} imported by {} is not defined by {} or any system before it",
Interner::extern_all(&self.path).join("::"),
Interner::extern_all(&self.referrer).join("::"),
self.system.join("::")
)
}
@@ -1,16 +1,16 @@
use std::any::Any;
use std::fmt::Debug;
use std::rc::Rc;

use dyn_clone::DynClone;

use super::ExternError;
use super::XfnResult;
use crate::ddispatch::request;
use crate::error::AssertionError;
use crate::interpreted::{ExprInst, TryFromExprInst};
use crate::interpreter::{Context, RuntimeError};
use crate::representations::interpreted::Clause;
use crate::utils::ddispatch::Responder;
use crate::{ast, NameLike};

/// Information returned by [Atomic::run]. This mirrors
/// [crate::interpreter::Return] but with a clause instead of an Expr.
@@ -77,6 +77,24 @@ where
{
self.atom_cls().wrap()
}

/// Wrap the atom in a clause to be placed in a [sourcefile::FileEntry].
#[must_use]
fn ast_cls<N: NameLike>(self) -> ast::Clause<N>
where
Self: Sized,
{
ast::Clause::Atom(Atom::new(self))
}

/// Wrap the atom in an expression to be placed in a [sourcefile::FileEntry].
#[must_use]
fn ast_exp<N: NameLike>(self) -> ast::Expr<N>
where
Self: Sized,
{
self.ast_cls().into_expr()
}
}

/// Represents a black box unit of code with its own normalization steps.
@@ -129,7 +147,7 @@ impl Debug for Atom {
}

impl TryFromExprInst for Atom {
fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
fn from_exi(exi: ExprInst) -> XfnResult<Self> {
let loc = exi.location();
match exi.expr_val().clause {
Clause::Atom(a) => Ok(a),
@@ -1,7 +1,7 @@
use std::error::Error;
use std::fmt::{Debug, Display};
use std::hash::Hash;
use std::rc::Rc;
use std::sync::Arc;

use dyn_clone::{clone_box, DynClone};

@@ -9,16 +9,17 @@ use super::XfnResult;
use crate::interpreted::ExprInst;
use crate::interpreter::Context;
use crate::representations::interpreted::Clause;
use crate::{ast, NameLike};

/// Errors produced by external code
pub trait ExternError: Display {
pub trait ExternError: Display + Send + Sync + DynClone {
/// Convert into trait object
#[must_use]
fn into_extern(self) -> Rc<dyn ExternError>
fn into_extern(self) -> Arc<dyn ExternError>
where
Self: 'static + Sized,
{
Rc::new(self)
Arc::new(self)
}
}

@@ -51,6 +52,14 @@ pub trait ExternFn: DynClone + Send {
{
Clause::ExternFn(ExFn(Box::new(self)))
}
/// Wrap this function in a clause to be placed in a [FileEntry].
#[must_use]
fn xfn_ast_cls<N: NameLike>(self) -> ast::Clause<N>
where
Self: Sized + 'static,
{
ast::Clause::ExternFn(ExFn(Box::new(self)))
}
}

impl Eq for dyn ExternFn {}
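Since `ExternError` now requires `Display + Send + Sync + DynClone` and `into_extern` hands back an `Arc`, a custom error only needs `Clone` and `Display` to satisfy the trait, and the resulting trait object can be shared across threads. A rough sketch with an invented `SocketClosed` error (paths as seen from inside the crate; the type itself is not part of this commit):

use std::fmt;
use std::sync::Arc;

use crate::foreign::ExternError;

// Illustrative error type; Clone gives it DynClone through dyn_clone's blanket impl.
#[derive(Clone)]
struct SocketClosed {
  port: u16,
}

impl fmt::Display for SocketClosed {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "socket on port {} closed unexpectedly", self.port)
  }
}

impl ExternError for SocketClosed {}

// The default into_extern wraps the value in an Arc, so it is Send + Sync.
fn fail_socket<T>(port: u16) -> Result<T, Arc<dyn ExternError>> {
  Err(SocketClosed { port }.into_extern())
}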
@@ -1,6 +1,6 @@
use std::fmt::Debug;
use std::marker::PhantomData;
use std::rc::Rc;
use std::sync::Arc;

use super::atom::StrictEq;
use super::{
@@ -61,7 +61,7 @@ impl<T, U, F> Param<T, U, F> {
/// Wrap a new function in a parametric struct
pub fn new(f: F) -> Self
where
F: FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
F: FnOnce(T) -> Result<U, Arc<dyn ExternError>>,
{
Self { data: f, _t: PhantomData, _u: PhantomData }
}
@@ -77,7 +77,7 @@ impl<T, U, F: Clone> Clone for Param<T, U, F> {
impl<
T: 'static + TryFromExprInst,
U: 'static + ToClause,
F: 'static + Clone + Send + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
F: 'static + Clone + Send + FnOnce(T) -> Result<U, Arc<dyn ExternError>>,
> ToClause for Param<T, U, F>
{
fn to_clause(self) -> Clause { self.xfn_cls() }
@@ -109,7 +109,7 @@ impl<T, U, F> Responder for FnMiddleStage<T, U, F> {}
impl<
T: 'static + TryFromExprInst,
U: 'static + ToClause,
F: 'static + Clone + FnOnce(T) -> Result<U, Rc<dyn ExternError>> + Send,
F: 'static + Clone + FnOnce(T) -> Result<U, Arc<dyn ExternError>> + Send,
> Atomic for FnMiddleStage<T, U, F>
{
fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
@@ -127,7 +127,7 @@ impl<
impl<
T: 'static + TryFromExprInst,
U: 'static + ToClause,
F: 'static + Clone + Send + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
F: 'static + Clone + Send + FnOnce(T) -> Result<U, Arc<dyn ExternError>>,
> ExternFn for Param<T, U, F>
{
fn name(&self) -> &str { "anonymous Rust function" }
@@ -137,8 +137,9 @@ impl<
}

pub mod constructors {
use std::rc::Rc;

use std::sync::Arc;
use super::{Param, ToClause};
use crate::foreign::{ExternError, ExternFn};
use crate::interpreted::TryFromExprInst;
@@ -163,7 +164,7 @@ pub mod constructors {
TLast: TryFromExprInst + 'static,
TReturn: ToClause + Send + 'static,
TFunction: FnOnce( $( $t , )* TLast )
-> Result<TReturn, Rc<dyn ExternError>> + Clone + Send + 'static
-> Result<TReturn, Arc<dyn ExternError>> + Clone + Send + 'static
>(function: TFunction) -> impl ExternFn {
xfn_variant!(@BODY_LOOP function
( $( ( $t [< $t:lower >] ) )* )
@@ -1,11 +1,10 @@
use std::any::Any;
use std::fmt::Debug;
use std::rc::Rc;

use ordered_float::NotNan;

use super::atom::StrictEq;
use super::{AtomicResult, AtomicReturn, ExternError};
use super::{AtomicResult, AtomicReturn, XfnResult};
use crate::error::AssertionError;
#[allow(unused)] // for doc
// use crate::define_fn;
@@ -62,7 +61,7 @@ impl<T: InertAtomic> Atomic for T {
}

impl<T: InertAtomic> TryFromExprInst for T {
fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
fn from_exi(exi: ExprInst) -> XfnResult<Self> {
let Expr { clause, location } = exi.expr_val();
match clause {
Clause::Atom(a) => match a.0.as_any().downcast() {
@@ -8,7 +8,7 @@ mod extern_fn;
mod fn_bridge;
mod inert;

use std::rc::Rc;
use std::sync::Arc;

pub use atom::{Atom, Atomic, AtomicResult, AtomicReturn, StrictEq};
pub use extern_fn::{ExFn, ExternError, ExternFn};
@@ -22,4 +22,4 @@ pub use inert::InertAtomic;
pub use crate::representations::interpreted::Clause;

/// Return type of the argument to the [xfn_1ary] family of functions
pub type XfnResult<T> = Result<T, Rc<dyn ExternError>>;
pub type XfnResult<T> = Result<T, Arc<dyn ExternError>>;
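For orientation, this is roughly how an external function body reads against the new alias, reusing `AssertionError::fail` from this same commit and crate-internal paths; the `half` function itself is made up:

use crate::error::AssertionError;
use crate::foreign::XfnResult;
use crate::Location;

// Hypothetical 1-ary external function: the alias keeps the signature short
// while the error side is now an Arc, so results can cross thread boundaries.
fn half(n: usize) -> XfnResult<usize> {
  if n % 2 == 0 {
    Ok(n / 2)
  } else {
    AssertionError::fail(Location::Unknown, "an even number")
  }
}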
@@ -1,22 +1,22 @@
use std::fmt::Display;
use std::rc::Rc;
use std::fmt::{Debug, Display};
use std::sync::Arc;

use crate::foreign::ExternError;
use crate::{Location, Sym};

/// Problems in the process of execution
#[derive(Clone, Debug)]
#[derive(Debug, Clone)]
pub enum RuntimeError {
/// A Rust function encountered an error
Extern(Rc<dyn ExternError>),
Extern(Arc<dyn ExternError>),
/// Primitive applied as function
NonFunctionApplication(Location),
/// Symbol not in context
MissingSymbol(Sym, Location),
}

impl From<Rc<dyn ExternError>> for RuntimeError {
fn from(value: Rc<dyn ExternError>) -> Self { Self::Extern(value) }
impl From<Arc<dyn ExternError>> for RuntimeError {
fn from(value: Arc<dyn ExternError>) -> Self { Self::Extern(value) }
}

impl Display for RuntimeError {
@@ -1,5 +1,5 @@
use std::any::{Any, TypeId};
use std::rc::Rc;
use std::sync::Arc;

use hashbrown::HashMap;
use trait_set::trait_set;
@@ -58,7 +58,7 @@ impl<'a> HandlerTable<'a> {

/// Various possible outcomes of a [Handler] execution. Ok returns control to
/// the interpreter. The meaning of Err is decided by the value in it.
pub type HandlerRes = Result<ExprInst, Rc<dyn ExternError>>;
pub type HandlerRes = Result<ExprInst, Arc<dyn ExternError>>;

/// [run] orchid code, executing any commands it returns using the specified
/// [Handler]s.
@@ -12,7 +12,7 @@ pub mod facade;
pub mod foreign;
pub mod interner;
pub mod interpreter;
mod parse;
pub mod parse;
pub mod pipeline;
mod representations;
pub mod rule;
@@ -13,16 +13,27 @@ use crate::{Location, VName};
/// Hiding type parameters in associated types allows for simpler
/// parser definitions
pub trait Context {
/// Get the path to the current file
#[must_use]
fn file(&self) -> Arc<VName>;
/// Get a reference to the interner
#[must_use]
fn interner(&self) -> &Interner;
/// Get a reference to the full source text for position math and to build
/// [Location]s.
#[must_use]
fn source(&self) -> Arc<String>;
/// Get the list of all lexer plugins
#[must_use]
fn lexers(&self) -> &[&dyn LexerPlugin];
/// Get the list of all parser plugins
#[must_use]
fn line_parsers(&self) -> &[&dyn LineParser];
/// Find our position in the text given the text we've yet to parse
#[must_use]
fn pos(&self, tail: &str) -> usize { self.source().len() - tail.len() }
/// Generate a location given the length of a token and the unparsed text
/// after it. See also [Context::range_loc] if the maths gets complex.
#[must_use]
fn location(&self, len: usize, tail: &str) -> Location {
match self.pos(tail).checked_sub(len) {
@@ -33,14 +44,35 @@ pub trait Context {
},
}
}
/// Generate a location given a range in the source file. The location can be
/// computed with [Context::pos]. See also [Context::location].
#[must_use]
fn range_loc(&self, range: Range<usize>) -> Location {
Location::Range { file: self.file(), range, source: self.source() }
}
}

impl<C: Context + ?Sized> Context for &C {
fn file(&self) -> Arc<VName> { (*self).file() }
fn interner(&self) -> &Interner { (*self).interner() }
fn lexers(&self) -> &[&dyn LexerPlugin] { (*self).lexers() }
fn line_parsers(&self) -> &[&dyn LineParser] { (*self).line_parsers() }
fn location(&self, len: usize, tail: &str) -> Location {
(*self).location(len, tail)
}
fn pos(&self, tail: &str) -> usize { (*self).pos(tail) }
fn range_loc(&self, range: Range<usize>) -> Location {
(*self).range_loc(range)
}
fn source(&self) -> Arc<String> { (*self).source() }
}

/// Return value of a lexer plugin; the parsed data and the remaining string
pub type LexerPluginOut<'a> = Option<ProjectResult<(Atom, &'a str)>>;
/// Return value of a line parser; the meaningful lines derived from this parser
pub type LineParserOut = Option<ProjectResult<Vec<FileEntryKind>>>;

/// A plugin callback that reads a custom lexeme.
pub trait LexerPlugin:
for<'a> Fn(&'a str, &dyn Context) -> LexerPluginOut<'a> + Sync + Send
{
@@ -50,12 +82,17 @@ impl<F> LexerPlugin for F where
{
}

/// A plugin callback that parses a custom file entry
pub trait LineParser:
Fn(Stream<'_>, &dyn Context) -> LineParserOut + Sync + Send
for<'a> Fn(Stream<'_>, &'a (dyn Context + 'a)) -> LineParserOut
+ Sync
+ Send
{
}
impl<F> LineParser for F where
F: Fn(Stream<'_>, &dyn Context) -> LineParserOut + Sync + Send
F: for<'a> Fn(Stream<'_>, &'a (dyn Context + 'a)) -> LineParserOut
+ Sync
+ Send
{
}
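As a rough illustration of the callback shape these traits accept, here is a plugin that lexes a literal `#` into an atom. Only the signature and the `Context` calls come from this file; `HashSign` and its inert-atom wiring are invented for the example and their impls are elided:

use crate::foreign::Atom;
use crate::parse::{Context, LexerPluginOut};

// Assumed to be wired up through the crate's Atomic/InertAtomic machinery (not shown).
#[derive(Clone, Debug)]
struct HashSign;

// Any Send + Sync function with this shape is a LexerPlugin: inspect the
// unparsed tail, decline with None, or consume a prefix and return the atom
// together with the remaining text.
fn lex_hash<'a>(data: &'a str, ctx: &dyn Context) -> LexerPluginOut<'a> {
  let tail = data.strip_prefix('#')?;
  let _location = ctx.location(1, tail); // where diagnostics would point
  Some(Ok((Atom::new(HashSign), tail)))
}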
@@ -64,22 +101,23 @@ impl<F> LineParser for F where
/// Hiding type parameters in associated types allows for simpler
/// parser definitions
pub struct ParsingContext<'a> {
pub interner: &'a Interner,
pub file: Arc<VName>,
pub source: Arc<String>,
pub lexers: &'a [&'a dyn LexerPlugin],
pub line_parsers: &'a [&'a dyn LineParser],
interner: &'a Interner,
file_path: Arc<VName>,
source: Arc<String>,
lexers: &'a [&'a dyn LexerPlugin],
line_parsers: &'a [&'a dyn LineParser],
}

impl<'a> ParsingContext<'a> {
/// Create a new parsing context
pub fn new(
interner: &'a Interner,
file: Arc<VName>,
file_path: Arc<VName>,
source: Arc<String>,
lexers: &'a [&'a dyn LexerPlugin],
line_parsers: &'a [&'a dyn LineParser],
) -> Self {
Self { interner, file, source, lexers, line_parsers }
Self { interner, file_path, source, lexers, line_parsers }
}
}
@@ -87,7 +125,7 @@ impl<'a> Clone for ParsingContext<'a> {
fn clone(&self) -> Self {
Self {
interner: self.interner,
file: self.file.clone(),
file_path: self.file_path.clone(),
source: self.source.clone(),
lexers: self.lexers,
line_parsers: self.line_parsers,
@@ -97,7 +135,7 @@ impl<'a> Clone for ParsingContext<'a> {

impl Context for ParsingContext<'_> {
fn interner(&self) -> &Interner { self.interner }
fn file(&self) -> Arc<VName> { self.file.clone() }
fn file(&self) -> Arc<VName> { self.file_path.clone() }
fn source(&self) -> Arc<String> { self.source.clone() }
fn lexers(&self) -> &[&dyn LexerPlugin] { self.lexers }
fn line_parsers(&self) -> &[&dyn LineParser] { self.line_parsers }
@@ -116,3 +154,23 @@ impl<'a> Context for MockContext<'a> {
fn range_loc(&self, _: Range<usize>) -> Location { Location::Unknown }
fn source(&self) -> Arc<String> { Arc::new(String::new()) }
}

pub struct FlatLocContext<'a, C: Context + ?Sized> {
sub: &'a C,
location: &'a Location,
}
impl<'a, C: Context + ?Sized> FlatLocContext<'a, C> {
pub fn new(sub: &'a C, location: &'a Location) -> Self {
Self { sub, location }
}
}
impl<'a, C: Context + ?Sized> Context for FlatLocContext<'a, C> {
fn interner(&self) -> &Interner { self.sub.interner() }
fn pos(&self, _: &str) -> usize { 0 }
fn file(&self) -> Arc<VName> { self.sub.file() }
fn lexers(&self) -> &[&dyn LexerPlugin] { self.sub.lexers() }
fn line_parsers(&self) -> &[&dyn LineParser] { self.sub.line_parsers() }
fn source(&self) -> Arc<String> { self.sub.source() }
fn location(&self, _: usize, _: &str) -> Location { self.location.clone() }
fn range_loc(&self, _: Range<usize>) -> Location { self.location.clone() }
}
@@ -1,13 +1,19 @@
//! Errors produced by the parser. Plugins are encouraged to reuse these where
//! applicable.

use std::rc::Rc;

use itertools::Itertools;

use super::{Entry, Lexeme};
use crate::error::ProjectError;
use super::{Entry, Lexeme, Stream};
use crate::ast::PType;
use crate::error::{ProjectError, ProjectResult};
use crate::{Location, Tok};

/// A line does not begin with an identifying keyword
#[derive(Debug)]
pub struct LineNeedsPrefix {
/// Erroneous line starter
pub entry: Entry,
}
impl ProjectError for LineNeedsPrefix {
@@ -18,6 +24,7 @@ impl ProjectError for LineNeedsPrefix {
}
}

/// The line ends abruptly
#[derive(Debug)]
pub struct UnexpectedEOL {
/// Last entry before EOL
@@ -33,7 +40,9 @@ impl ProjectError for UnexpectedEOL {
}
}

/// The line should have ended
pub struct ExpectedEOL {
/// Location of the last valid or first excessive token
pub location: Location,
}
impl ProjectError for ExpectedEOL {
@@ -41,11 +50,14 @@ impl ProjectError for ExpectedEOL {
fn one_position(&self) -> Location { self.location.clone() }
}

/// A name was expected
#[derive(Debug)]
pub struct ExpectedName {
/// Non-name entry
pub entry: Entry,
}
impl ExpectedName {
/// If the entry is a name, return its text. If it's not, produce this error.
pub fn expect(entry: &Entry) -> Result<Tok<String>, Rc<dyn ProjectError>> {
match &entry.lexeme {
Lexeme::Name(n) => Ok(n.clone()),
@@ -57,24 +69,22 @@ impl ProjectError for ExpectedName {
fn description(&self) -> &str { "A name was expected" }
fn one_position(&self) -> Location { self.entry.location() }
fn message(&self) -> String {
if self.entry.is_keyword() {
format!(
"{} is a restricted keyword and cannot be used as a name",
self.entry
)
} else {
format!("Expected a name, found {}", self.entry)
}
}
}

/// A specific lexeme was expected
#[derive()]
pub struct Expected {
/// The lexemes that would have been acceptable
pub expected: Vec<Lexeme>,
/// Whether a name would also have been acceptable (multiname)
pub or_name: bool,
/// What was actually found
pub found: Entry,
}
impl Expected {
/// Assert that the entry contains exactly the specified lexeme
pub fn expect(l: Lexeme, e: &Entry) -> Result<(), Rc<dyn ProjectError>> {
if e.lexeme.strict_eq(&l) {
return Ok(());
@@ -99,7 +109,9 @@ impl ProjectError for Expected {
}
}

/// A token reserved for future use was found in the code
pub struct ReservedToken {
/// The offending token
pub entry: Entry,
}
impl ProjectError for ReservedToken {
@@ -108,8 +120,11 @@ impl ProjectError for ReservedToken {
fn message(&self) -> String { format!("{} is a reserved token", self.entry) }
}

/// A token was found where it doesn't belong
pub struct BadTokenInRegion {
/// What was found
pub entry: Entry,
/// Human-readable name of the region where it should not appear
pub region: &'static str,
}
impl ProjectError for BadTokenInRegion {
@@ -120,8 +135,11 @@ impl ProjectError for BadTokenInRegion {
}
}

/// A specific lexeme was searched but not found
pub struct NotFound {
/// Human-readable description of what was searched
pub expected: &'static str,
/// Area covered by the search
pub location: Location,
}
impl ProjectError for NotFound {
@@ -130,12 +148,14 @@ impl ProjectError for NotFound {
fn message(&self) -> String { format!("{} was expected", self.expected) }
}

/// :: found on its own somewhere other than a general export
pub struct LeadingNS(pub Location);
impl ProjectError for LeadingNS {
fn description(&self) -> &str { ":: can only follow a name token" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// Parens don't pair up
pub struct MisalignedParen(pub Entry);
impl ProjectError for MisalignedParen {
fn description(&self) -> &str { "(), [] and {} must always pair up" }
@@ -143,30 +163,35 @@ impl ProjectError for MisalignedParen {
fn message(&self) -> String { format!("This {} has no pair", self.0) }
}

/// Export line contains a complex name
pub struct NamespacedExport(pub Location);
impl ProjectError for NamespacedExport {
fn description(&self) -> &str { "Only local names may be exported" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// Export line contains *
pub struct GlobExport(pub Location);
impl ProjectError for GlobExport {
fn description(&self) -> &str { "Globstars are not allowed in exports" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// String literal never ends
pub struct NoStringEnd(pub Location);
impl ProjectError for NoStringEnd {
fn description(&self) -> &str { "A string literal was not closed with `\"`" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// Comment never ends
pub struct NoCommentEnd(pub Location);
impl ProjectError for NoCommentEnd {
fn description(&self) -> &str { "a comment was not closed with `]--`" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// A placeholder's priority is a floating point number
pub struct FloatPlacehPrio(pub Location);
impl ProjectError for FloatPlacehPrio {
fn description(&self) -> &str {
@@ -175,12 +200,14 @@ impl ProjectError for FloatPlacehPrio {
fn one_position(&self) -> Location { self.0.clone() }
}

/// A number literal decodes to NaN
pub struct NaNLiteral(pub Location);
impl ProjectError for NaNLiteral {
fn description(&self) -> &str { "float literal decoded to NaN" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// A sequence of digits in a number literal overflows [usize].
pub struct LiteralOverflow(pub Location);
impl ProjectError for LiteralOverflow {
fn description(&self) -> &str {
@@ -189,18 +216,21 @@ impl ProjectError for LiteralOverflow {
fn one_position(&self) -> Location { self.0.clone() }
}

/// A digit was expected but something else was found
pub struct ExpectedDigit(pub Location);
impl ProjectError for ExpectedDigit {
fn description(&self) -> &str { "expected a digit" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// A unicode escape sequence contains something other than a hex digit
pub struct NotHex(pub Location);
impl ProjectError for NotHex {
fn description(&self) -> &str { "Expected a hex digit" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// A unicode escape sequence contains a number that isn't a unicode code point.
pub struct BadCodePoint(pub Location);
impl ProjectError for BadCodePoint {
fn description(&self) -> &str {
@@ -209,8 +239,36 @@ impl ProjectError for BadCodePoint {
fn one_position(&self) -> Location { self.0.clone() }
}

/// An unrecognized escape sequence occurred in a string.
pub struct BadEscapeSequence(pub Location);
impl ProjectError for BadEscapeSequence {
fn description(&self) -> &str { "Unrecognized escape sequence" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// Expected a parenthesized block at the end of the line
pub struct ExpectedBlock(pub Location);
impl ExpectedBlock {
/// Remove two parentheses from the ends of the cursor
pub fn expect(tail: Stream, typ: PType) -> ProjectResult<Stream> {
let (lp, tail) = tail.trim().pop()?;
Expected::expect(Lexeme::LP(typ), lp)?;
let (rp, tail) = tail.pop_back()?;
Expected::expect(Lexeme::RP(typ), rp)?;
Ok(tail.trim())
}
}
impl ProjectError for ExpectedBlock {
fn description(&self) -> &str { "Expected a parenthesized block" }
fn one_position(&self) -> Location { self.0.clone() }
}

/// A namespaced name was expected but a glob pattern or a branching multiname
/// was found.
pub struct ExpectedSingleName(pub Location);
impl ProjectError for ExpectedSingleName {
fn one_position(&self) -> Location { self.0.clone() }
fn description(&self) -> &str {
"expected a single name, no wildcards, no branches"
}
}
@@ -1,22 +1,46 @@
use super::context::Context;
use super::context::{Context, FlatLocContext};
use super::lexer::lex;
use super::sourcefile::parse_module_body;
use super::sourcefile::{parse_exprv, parse_module_body, vec_to_single};
use super::stream::Stream;
use crate::error::{ParseErrorWithTokens, ProjectError, ProjectResult};
use super::{parse_line, split_lines};
use crate::ast::Expr;
use crate::error::ProjectResult;
use crate::representations::sourcefile::FileEntry;
use crate::{Location, VName};

pub fn parse2(ctx: impl Context) -> ProjectResult<Vec<FileEntry>> {
let tokens = lex(vec![], ctx.source().as_str(), &ctx).expect("debug");
/// Parse a file
pub fn parse_file(ctx: impl Context) -> ProjectResult<Vec<FileEntry>> {
let tokens = lex(vec![], ctx.source().as_str(), &ctx)?;
if tokens.is_empty() {
Ok(Vec::new())
} else {
parse_module_body(Stream::from_slice(&tokens), &ctx).map_err(|error| {
ParseErrorWithTokens {
error,
full_source: ctx.source().to_string(),
tokens,
}
.rc()
})
parse_module_body(Stream::from_slice(&tokens), &ctx)
}
}

/// Parse a ready-made expression
pub fn parse_expr(
ctx: &impl Context,
text: &'static str,
location: Location,
) -> ProjectResult<Expr<VName>> {
let ctx = FlatLocContext::new(ctx, &location);
let tokens = lex(vec![], text, &ctx)?;
let items = parse_exprv(Stream::from_slice(&tokens), None, &ctx)?.0;
vec_to_single(tokens.first().expect("source must not be empty"), items)
}

/// Parse a ready-made line
pub fn parse_entries(
ctx: &(impl Context + ?Sized),
text: &'static str,
location: Location,
) -> ProjectResult<Vec<FileEntry>> {
let ctx = FlatLocContext::new(ctx, &location);
let tokens = lex(vec![], text, &ctx)?;
let entries = split_lines(Stream::from_slice(&tokens))
.flat_map(|tokens| parse_line(tokens, &ctx).expect("pre-specified source"))
.map(|kind| kind.wrap(location.clone()))
.collect();
Ok(entries)
}
@@ -9,7 +9,7 @@ use super::context::Context;
use super::errors::{FloatPlacehPrio, NoCommentEnd};
use super::numeric::{numstart, parse_num, print_nat16};
use super::LexerPlugin;
use crate::ast::{PHClass, Placeholder};
use crate::ast::{PHClass, PType, Placeholder};
use crate::error::{ProjectError, ProjectResult};
use crate::foreign::Atom;
use crate::interner::Tok;
@@ -20,9 +20,12 @@ use crate::utils::pure_seq::next;
use crate::utils::unwrap_or;
use crate::{Location, VName};

/// A lexeme and the location where it was found
#[derive(Clone, Debug)]
pub struct Entry {
/// the lexeme
pub lexeme: Lexeme,
/// the location. Always a range
pub location: Location,
}
impl Entry {
@@ -32,27 +35,17 @@ impl Entry {
matches!(self.lexeme, Lexeme::Comment(_) | Lexeme::BR)
}

#[must_use]
pub fn is_keyword(&self) -> bool {
false
// matches!(
// self.lexeme,
// Lexeme::Const
// | Lexeme::Export
// | Lexeme::Import
// | Lexeme::Macro
// | Lexeme::Module
// )
}

/// Get location
#[must_use]
pub fn location(&self) -> Location { self.location.clone() }

/// Get range from location
#[must_use]
pub fn range(&self) -> Range<usize> {
self.location.range().expect("An Entry can only have a known location")
}

/// Get file path from location
#[must_use]
pub fn file(&self) -> Arc<VName> {
self.location.file().expect("An Entry can only have a range location")
@@ -73,32 +66,34 @@ impl AsRef<Location> for Entry {
fn as_ref(&self) -> &Location { &self.location }
}

/// A unit of syntax
#[derive(Clone, Debug)]
pub enum Lexeme {
/// Atoms parsed by plugins
Atom(Atom),
/// Keyword or name
Name(Tok<String>),
/// Macro operator `=`number`=>`
Arrow(NotNan<f64>),
/// Walrus operator (formerly shorthand macro)
/// `:=`
Walrus,
/// Line break
BR,
/// Namespace separator
/// `::`
NS,
/// Left paren
LP(char),
/// Right paren
RP(char),
/// Backslash
/// Left paren `([{`
LP(PType),
/// Right paren `)]}`
RP(PType),
/// `\`
BS,
/// `@``
At,
// Dot,
Type, // type operator
/// `:`
Type,
/// comment
Comment(Arc<String>),
// Export,
// Import,
// Module,
// Macro,
// Const,
/// placeholder in a macro.
Placeh(Placeholder),
}

@@ -110,53 +105,26 @@ impl Display for Lexeme {
Self::Walrus => write!(f, ":="),
Self::Arrow(prio) => write!(f, "={}=>", print_nat16(*prio)),
Self::NS => write!(f, "::"),
Self::LP(l) => write!(f, "{}", l),
Self::RP(l) => match l {
'(' => write!(f, ")"),
'[' => write!(f, "]"),
'{' => write!(f, "}}"),
_ => f.debug_tuple("RP").field(l).finish(),
},
Self::LP(t) => write!(f, "{}", t.l()),
Self::RP(t) => write!(f, "{}", t.r()),
Self::BR => writeln!(f),
Self::BS => write!(f, "\\"),
Self::At => write!(f, "@"),
Self::Type => write!(f, ":"),
Self::Comment(text) => write!(f, "--[{}]--", text),
// Self::Export => write!(f, "export"),
// Self::Import => write!(f, "import"),
// Self::Module => write!(f, "module"),
// Self::Const => write!(f, "const"),
// Self::Macro => write!(f, "macro"),
Self::Placeh(Placeholder { name, class }) => match *class {
PHClass::Scalar => write!(f, "${}", **name),
PHClass::Vec { nonzero, prio } => {
if nonzero { write!(f, "...") } else { write!(f, "..") }?;
write!(f, "${}", **name)?;
if prio != 0 {
write!(f, ":{}", prio)?;
};
Ok(())
},
},
Self::Placeh(ph) => write!(f, "{ph}"),
}
}
}

impl Lexeme {
#[must_use]
pub fn rule(prio: impl Into<f64>) -> Self {
Lexeme::Arrow(
NotNan::new(prio.into()).expect("Rule priority cannot be NaN"),
)
}

/// Compare lexemes for equality. It's `strict` because for atoms it uses the
/// strict equality comparison
pub fn strict_eq(&self, other: &Self) -> bool {
match (self, other) {
(Self::Arrow(f1), Self::Arrow(f2)) => f1 == f2,
(Self::At, Self::At) | (Self::BR, Self::BR) => true,
(Self::BS, Self::BS) /*| (Self::Const, Self::Const)*/ => true,
// (Self::Export, Self::Export) | (Self::Import, Self::Import) => true,
// (Self::Macro, Self::Macro) | (Self::Module, Self::Module) => true,
(Self::BS, Self::BS) => true,
(Self::NS, Self::NS) | (Self::Type, Self::Type) => true,
(Self::Walrus, Self::Walrus) => true,
(Self::Atom(a1), Self::Atom(a2)) => a1.0.strict_eq(&a2.0),
@@ -164,20 +132,25 @@ impl Lexeme {
(Self::LP(p1), Self::LP(p2)) | (Self::RP(p1), Self::RP(p2)) => p1 == p2,
(Self::Name(n1), Self::Name(n2)) => n1 == n2,
(Self::Placeh(ph1), Self::Placeh(ph2)) => ph1 == ph2,
(_, _) => false,
(..) => false,
}
}
}

/// Neatly format source code
#[allow(unused)]
pub fn format(lexed: &[Entry]) -> String { lexed.iter().join(" ") }

/// Character filter that can appear in a keyword or name
pub fn namechar(c: char) -> bool { c.is_alphanumeric() | (c == '_') }
/// Character filter that can start a name
pub fn namestart(c: char) -> bool { c.is_alphabetic() | (c == '_') }
/// Character filter that can appear in operators.
pub fn opchar(c: char) -> bool {
!namestart(c) && !numstart(c) && !c.is_whitespace() && !"()[]{},".contains(c)
}

/// Split off all characters from the beginning that match a filter
pub fn split_filter(
s: &str,
mut pred: impl FnMut(char) -> bool,
@@ -189,12 +162,12 @@ fn lit_table() -> impl IntoIterator<Item = (&'static str, Lexeme)> {
[
("\\", Lexeme::BS),
("@", Lexeme::At),
("(", Lexeme::LP('(')),
("[", Lexeme::LP('[')),
("{", Lexeme::LP('{')),
(")", Lexeme::RP('(')),
("]", Lexeme::RP('[')),
("}", Lexeme::RP('{')),
("(", Lexeme::LP(PType::Par)),
("[", Lexeme::LP(PType::Sqr)),
("{", Lexeme::LP(PType::Curl)),
(")", Lexeme::RP(PType::Par)),
("]", Lexeme::RP(PType::Sqr)),
("}", Lexeme::RP(PType::Curl)),
("\n", Lexeme::BR),
(":=", Lexeme::Walrus),
("::", Lexeme::NS),
@@ -282,20 +255,22 @@ pub fn lex(
}
// todo: parse placeholders, don't forget vectorials!
if let Some(tail) = data.strip_prefix('$') {
let (nameonly, tail) =
tail.strip_prefix('_').map_or((false, tail), |t| (true, t));
let (name, tail) = split_filter(tail, namechar);
if !name.is_empty() {
let name = ctx.interner().i(name);
let location = ctx.location(name.len() + 1, tail);
let lexeme =
Lexeme::Placeh(Placeholder { name, class: PHClass::Scalar });
let class = if nameonly { PHClass::Name } else { PHClass::Scalar };
let lexeme = Lexeme::Placeh(Placeholder { name, class });
tokens.push(Entry::new(location, lexeme));
data = tail;
continue 'tail;
}
}
if let Some(vec) = data.strip_prefix("..") {
if let Some(tail) = data.strip_prefix("..") {
let (nonzero, tail) =
vec.strip_prefix('.').map_or((false, vec), |t| (true, t));
tail.strip_prefix('.').map_or((false, tail), |t| (true, t));
if let Some(tail) = tail.strip_prefix('$') {
let (name, tail) = split_filter(tail, namechar);
if !name.is_empty() {
@@ -1,5 +1,7 @@
//! Types for interacting with the Orchid parser, and parts of the parser
//! plugins can use to match the language's behaviour on certain tasks
mod context;
mod errors;
pub mod errors;
mod facade;
mod lexer;
mod multiname;
@@ -8,11 +10,20 @@ mod sourcefile;
mod stream;
mod string;

pub use context::{Context, LexerPlugin, LineParser, ParsingContext};
pub use facade::parse2;
pub use context::{
Context, LexerPlugin, LexerPluginOut, LineParser, LineParserOut,
ParsingContext,
};
pub use facade::{parse_entries, parse_expr, parse_file};
pub use lexer::{namechar, namestart, opchar, split_filter, Entry, Lexeme};
pub use multiname::parse_multiname;
pub use numeric::{
lex_numeric, numchar, numstart, parse_num, print_nat16, NumError,
NumErrorKind,
};
pub use sourcefile::{
expr_slice_location, parse_const, parse_exprv, parse_line, parse_module,
parse_module_body, parse_rule, split_lines, vec_to_single, parse_nsname
};
pub use stream::Stream;
pub use string::{lex_string, parse_string, StringError, StringErrorKind};
@@ -4,6 +4,7 @@ use super::context::Context;
use super::errors::Expected;
use super::stream::Stream;
use super::Lexeme;
use crate::ast::PType;
use crate::error::{ProjectError, ProjectResult};
use crate::sourcefile::Import;
use crate::utils::boxed_iter::{box_chain, box_once};
@@ -43,7 +44,7 @@ impl Subresult {

fn parse_multiname_branch<'a>(
cursor: Stream<'a>,
ctx: &impl Context,
ctx: &(impl Context + ?Sized),
) -> ProjectResult<(BoxedIter<'a, Subresult>, Stream<'a>)> {
let comma = ctx.interner().i(",");
let (subnames, cursor) = parse_multiname_rec(cursor, ctx)?;
@@ -53,10 +54,10 @@ fn parse_multiname_branch<'a>(
let (tail, cont) = parse_multiname_branch(cursor, ctx)?;
Ok((box_chain!(subnames, tail), cont))
},
Lexeme::RP('(') => Ok((subnames, cursor)),
Lexeme::RP(PType::Par) => Ok((subnames, cursor)),
_ => Err(
Expected {
expected: vec![Lexeme::Name(comma), Lexeme::RP('(')],
expected: vec![Lexeme::Name(comma), Lexeme::RP(PType::Par)],
or_name: false,
found: delim.clone(),
}
@@ -67,24 +68,24 @@ fn parse_multiname_branch<'a>(

fn parse_multiname_rec<'a>(
curosr: Stream<'a>,
ctx: &impl Context,
ctx: &(impl Context + ?Sized),
) -> ProjectResult<(BoxedIter<'a, Subresult>, Stream<'a>)> {
let star = ctx.interner().i("*");
let comma = ctx.interner().i(",");
let (head, mut cursor) = curosr.trim().pop()?;
match &head.lexeme {
Lexeme::LP('(') => parse_multiname_branch(cursor, ctx),
Lexeme::LP('[') => {
Lexeme::LP(PType::Par) => parse_multiname_branch(cursor, ctx),
Lexeme::LP(PType::Sqr) => {
let mut names = Vec::new();
loop {
let head;
(head, cursor) = cursor.trim().pop()?;
match &head.lexeme {
Lexeme::Name(n) => names.push((n, head.location())),
Lexeme::RP('[') => break,
Lexeme::RP(PType::Sqr) => break,
_ => {
let err = Expected {
expected: vec![Lexeme::RP('[')],
expected: vec![Lexeme::RP(PType::Sqr)],
or_name: true,
found: head.clone(),
};
@@ -114,7 +115,7 @@ fn parse_multiname_rec<'a>(
},
_ => Err(
Expected {
expected: vec![Lexeme::LP('(')],
expected: vec![Lexeme::LP(PType::Par)],
or_name: true,
found: head.clone(),
}
@@ -123,9 +124,25 @@ fn parse_multiname_rec<'a>(
}
}

/// Parse a tree that describes several names. The tree can be
///
/// - name (except `,` or `*`)
/// - name (except `,` or `*`) `::` tree
/// - `(` tree `,` tree ... `)`
/// - `*` (wildcard)
/// - `[` name name ... `]` (including `,` or `*`).
///
/// Examples of valid syntax:
///
/// ```txt
/// foo
/// foo::bar::baz
/// foo::bar::(baz, quz::quux, fimble::*)
/// foo::bar::[baz quz * +]
/// ```
pub fn parse_multiname<'a>(
cursor: Stream<'a>,
ctx: &impl Context,
ctx: &(impl Context + ?Sized),
) -> ProjectResult<(Vec<Import>, Stream<'a>)> {
let (output, cont) = parse_multiname_rec(cursor, ctx)?;
Ok((output.map(|sr| sr.finalize()).collect(), cont))
@@ -5,16 +5,22 @@ use std::rc::Rc;
use ordered_float::NotNan;

use super::context::Context;
use super::errors::NaNLiteral;
#[allow(unused)] // for doc
use super::context::LexerPlugin;
use super::errors::{ExpectedDigit, LiteralOverflow, NaNLiteral};
use super::lexer::split_filter;
use crate::error::{ProjectError, ProjectResult};
use crate::foreign::Atom;
use crate::systems::stl::Numeric;

/// Rasons why [parse_num] might fail. See [NumError].
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum NumErrorKind {
/// The literal describes [f64::NAN]
NaN,
/// Some integer appearing in the literal overflows [usize]
Overflow,
/// A character that isn't a digit in the given base was found
InvalidDigit,
}
impl NumErrorKind {
@@ -27,13 +33,17 @@ impl NumErrorKind {
}
}

/// Error produced by [parse_num]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NumError {
/// Location
pub range: Range<usize>,
/// Reason
pub kind: NumErrorKind,
}

impl NumError {
/// Convert into [ProjectError] trait object
pub fn into_proj(
self,
len: usize,
@@ -44,12 +54,13 @@ impl NumError {
let location = ctx.range_loc(start..start + self.range.len());
match self.kind {
NumErrorKind::NaN => NaNLiteral(location).rc(),
_ => panic!(),
// NumErrorKind::Int(iek) => IntError(location, iek).rc(),
NumErrorKind::InvalidDigit => ExpectedDigit(location).rc(),
NumErrorKind::Overflow => LiteralOverflow(location).rc(),
}
}
}

/// Parse a numbre literal out of text
pub fn parse_num(string: &str) -> Result<Numeric, NumError> {
let overflow_err =
NumError { range: 0..string.len(), kind: NumErrorKind::Overflow };
@@ -96,9 +107,12 @@ fn int_parse(s: &str, radix: u8, start: usize) -> Result<usize, NumError> {
.map_err(|e| NumError { range, kind: NumErrorKind::from_int(e.kind()) })
}

/// Filter for characters that can appear in numbers
pub fn numchar(c: char) -> bool { c.is_alphanumeric() | "._-".contains(c) }
/// Filter for characters that can start numbers
pub fn numstart(c: char) -> bool { c.is_ascii_digit() }

/// [LexerPlugin] for a number literal
pub fn lex_numeric<'a>(
data: &'a str,
ctx: &dyn Context,
@@ -140,6 +154,7 @@ mod test {
}
}

/// Print a number as a base-16 floating point literal
#[must_use]
pub fn print_nat16(num: NotNan<f64>) -> String {
if *num == 0.0 {
@@ -5,20 +5,23 @@ use itertools::Itertools;
use super::context::Context;
use super::errors::{
BadTokenInRegion, Expected, ExpectedName, GlobExport, LeadingNS,
MisalignedParen, NamespacedExport, ReservedToken, UnexpectedEOL,
BadTokenInRegion, Expected, ExpectedBlock, ExpectedName, ExpectedSingleName,
GlobExport, LeadingNS, MisalignedParen, NamespacedExport, ReservedToken,
UnexpectedEOL,
};
use super::lexer::Lexeme;
use super::multiname::parse_multiname;
use super::stream::Stream;
use super::Entry;
use crate::ast::{Clause, Constant, Expr, Rule};
use crate::ast::{Clause, Constant, Expr, PType, Rule};
use crate::error::{ProjectError, ProjectResult};
use crate::representations::location::Location;
use crate::representations::sourcefile::{FileEntry, MemberKind, ModuleBlock};
use crate::representations::VName;
use crate::sourcefile::{FileEntryKind, Import, Member};
use crate::utils::pure_seq::pushed;

/// Split the stream at each line break outside parentheses
pub fn split_lines(module: Stream<'_>) -> impl Iterator<Item = Stream<'_>> {
let mut source = module.data.iter().enumerate();
let mut fallback = module.fallback;
@@ -47,15 +50,27 @@ pub fn split_lines(module: Stream<'_>) -> impl Iterator<Item = Stream<'_>> {
}
None
})
.map(Stream::trim)
.map(|s| {
s.pop()
.and_then(|(first, inner)| {
let (last, inner) = inner.pop_back()?;
match (&first.lexeme, &last.lexeme) {
(Lexeme::LP(PType::Par), Lexeme::RP(PType::Par)) => Ok(inner.trim()),
_ => Ok(s),
}
})
.unwrap_or(s)
})
.filter(|l| !l.data.is_empty())
}

/// Parse linebreak-separated entries
pub fn parse_module_body(
cursor: Stream<'_>,
ctx: &impl Context,
ctx: &(impl Context + ?Sized),
) -> ProjectResult<Vec<FileEntry>> {
split_lines(cursor)
.map(Stream::trim)
.filter(|l| !l.data.is_empty())
.map(|l| {
parse_line(l, ctx).map(move |kinds| {
kinds
@@ -67,12 +82,13 @@ pub fn parse_module_body(
.collect()
}

/// Parse a single, possibly exported entry
pub fn parse_line(
cursor: Stream<'_>,
ctx: &impl Context,
ctx: &(impl Context + ?Sized),
) -> ProjectResult<Vec<FileEntryKind>> {
for line_parser in ctx.line_parsers() {
if let Some(result) = line_parser(cursor, ctx) {
if let Some(result) = line_parser(cursor, &ctx) {
return result;
}
}
@@ -100,9 +116,9 @@ pub fn parse_line(
}
}

pub fn parse_export_line(
fn parse_export_line(
cursor: Stream<'_>,
ctx: &impl Context,
ctx: &(impl Context + ?Sized),
) -> ProjectResult<FileEntryKind> {
let cursor = cursor.trim();
match &cursor.get(0)?.lexeme {
@@ -135,7 +151,7 @@ pub fn parse_export_line(
|
||||
|
||||
fn parse_member(
|
||||
cursor: Stream<'_>,
|
||||
ctx: &impl Context,
|
||||
ctx: &(impl Context + ?Sized),
|
||||
) -> ProjectResult<MemberKind> {
|
||||
let (typemark, cursor) = cursor.trim().pop()?;
|
||||
match &typemark.lexeme {
|
||||
@@ -144,7 +160,7 @@ fn parse_member(
|
||||
Ok(MemberKind::Constant(constant))
|
||||
},
|
||||
Lexeme::Name(n) if **n == "macro" => {
|
||||
let rule = parse_rule(cursor, ctx)?;
|
||||
let rule = parse_rule(cursor, &ctx)?;
|
||||
Ok(MemberKind::Rule(rule))
|
||||
},
|
||||
Lexeme::Name(n) if **n == "module" => {
|
||||
@@ -159,7 +175,8 @@ fn parse_member(
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_rule(
|
||||
/// Parse a macro rule
|
||||
pub fn parse_rule(
|
||||
cursor: Stream<'_>,
|
||||
ctx: &impl Context,
|
||||
) -> ProjectResult<Rule<VName>> {
|
||||
@@ -172,9 +189,10 @@ fn parse_rule(
|
||||
Ok(Rule { pattern, prio, template })
|
||||
}
|
||||
|
||||
fn parse_const(
|
||||
/// Parse a constant declaration
|
||||
pub fn parse_const(
|
||||
cursor: Stream<'_>,
|
||||
ctx: &impl Context,
|
||||
ctx: &(impl Context + ?Sized),
|
||||
) -> ProjectResult<Constant> {
|
||||
let (name_ent, cursor) = cursor.trim().pop()?;
|
||||
let name = ExpectedName::expect(name_ent)?;
|
||||
@@ -184,24 +202,38 @@ fn parse_const(
|
||||
Ok(Constant { name, value: vec_to_single(walrus_ent, body)? })
|
||||
}
|
||||
|
||||
fn parse_module(
|
||||
/// Parse a namespaced name. TODO: use this for modules
|
||||
pub fn parse_nsname<'a>(
|
||||
cursor: Stream<'a>,
|
||||
ctx: &(impl Context + ?Sized),
|
||||
) -> ProjectResult<(VName, Stream<'a>)> {
|
||||
let (name, tail) = parse_multiname(cursor, ctx)?;
|
||||
let name = match name.into_iter().exactly_one() {
|
||||
Ok(Import { name: Some(name), path, .. }) => pushed(path, name),
|
||||
_ => {
|
||||
let loc = cursor.data[0].location().to(tail.data[0].location());
|
||||
return Err(ExpectedSingleName(loc).rc());
|
||||
},
|
||||
};
|
||||
Ok((name, tail))
|
||||
}
|
||||
|
||||
/// Parse a submodule declaration
|
||||
pub fn parse_module(
|
||||
cursor: Stream<'_>,
|
||||
ctx: &impl Context,
|
||||
ctx: &(impl Context + ?Sized),
|
||||
) -> ProjectResult<ModuleBlock> {
|
||||
let (name_ent, cursor) = cursor.trim().pop()?;
|
||||
let name = ExpectedName::expect(name_ent)?;
|
||||
let (lp_ent, cursor) = cursor.trim().pop()?;
|
||||
Expected::expect(Lexeme::LP('('), lp_ent)?;
|
||||
let (last, cursor) = cursor.pop_back()?;
|
||||
Expected::expect(Lexeme::RP('('), last)?;
|
||||
let body = parse_module_body(cursor, ctx)?;
|
||||
Ok(ModuleBlock { name, body })
|
||||
let body = ExpectedBlock::expect(cursor, PType::Par)?;
|
||||
Ok(ModuleBlock { name, body: parse_module_body(body, ctx)? })
|
||||
}
|
||||
|
||||
fn parse_exprv<'a>(
|
||||
/// Parse a sequence of expressions
|
||||
pub fn parse_exprv<'a>(
|
||||
mut cursor: Stream<'a>,
|
||||
paren: Option<char>,
|
||||
ctx: &impl Context,
|
||||
paren: Option<PType>,
|
||||
ctx: &(impl Context + ?Sized),
|
||||
) -> ProjectResult<(Vec<Expr<VName>>, Stream<'a>)> {
|
||||
let mut output = Vec::new();
|
||||
cursor = cursor.trim();
|
||||
@@ -272,7 +304,8 @@ fn parse_exprv<'a>(
|
||||
Ok((output, Stream::new(cursor.fallback, &[])))
|
||||
}
|
||||
|
||||
fn vec_to_single(
|
||||
/// Wrap an expression list in parentheses if necessary
|
||||
pub fn vec_to_single(
|
||||
fallback: &Entry,
|
||||
v: Vec<Expr<VName>>,
|
||||
) -> ProjectResult<Expr<VName>> {
|
||||
@@ -281,11 +314,12 @@ fn vec_to_single(
|
||||
1 => Ok(v.into_iter().exactly_one().unwrap()),
|
||||
_ => Ok(Expr {
|
||||
location: expr_slice_location(&v),
|
||||
value: Clause::S('(', Rc::new(v)),
|
||||
value: Clause::S(PType::Par, Rc::new(v)),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the location of a sequence of consecutive expressions
|
||||
#[must_use]
|
||||
pub fn expr_slice_location(v: &[impl AsRef<Location>]) -> Location {
|
||||
v.first()
|
||||
|
||||
@@ -8,14 +8,18 @@ use crate::Location;
|
||||
#[must_use = "streams represent segments of code that must be parsed"]
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct Stream<'a> {
|
||||
/// Entry to place in errors if the stream contains no tokens
|
||||
pub fallback: &'a Entry,
|
||||
/// Tokens to parse
|
||||
pub data: &'a [Entry],
|
||||
}
|
||||
impl<'a> Stream<'a> {
|
||||
/// Create a new stream
|
||||
pub fn new(fallback: &'a Entry, data: &'a [Entry]) -> Self {
|
||||
Self { fallback, data }
|
||||
}
|
||||
|
||||
/// Remove comments and line breaks from both ends of the text
|
||||
pub fn trim(self) -> Self {
|
||||
let Self { data, fallback } = self;
|
||||
let front = data.iter().take_while(|e| e.is_filler()).count();
|
||||
@@ -25,12 +29,14 @@ impl<'a> Stream<'a> {
|
||||
Self { fallback, data }
|
||||
}
|
||||
|
||||
/// Discard the first entry
|
||||
pub fn step(self) -> ProjectResult<Self> {
|
||||
let (fallback, data) = (self.data.split_first())
|
||||
.ok_or_else(|| UnexpectedEOL { entry: self.fallback.clone() }.rc())?;
|
||||
Ok(Stream { data, fallback })
|
||||
}
|
||||
|
||||
/// Get the first entry
|
||||
pub fn pop(self) -> ProjectResult<(&'a Entry, Stream<'a>)> {
|
||||
Ok((self.get(0)?, self.step()?))
|
||||
}
|
||||
@@ -43,6 +49,7 @@ impl<'a> Stream<'a> {
|
||||
})
|
||||
}
|
||||
|
||||
/// Area covered by this stream
|
||||
#[must_use]
|
||||
pub fn location(self) -> Location {
|
||||
self.data.first().map_or_else(
|
||||
@@ -51,6 +58,8 @@ impl<'a> Stream<'a> {
|
||||
)
|
||||
}
|
||||
|
||||
/// Find a given token, split the stream there and read some value from the
|
||||
/// separator. See also [Stream::find]
|
||||
pub fn find_map<T>(
|
||||
self,
|
||||
expected: &'static str,
|
||||
@@ -65,6 +74,8 @@ impl<'a> Stream<'a> {
|
||||
Ok((Self::new(fallback, left), output, Self::new(middle_ent, right)))
|
||||
}
|
||||
|
||||
/// Split the stream at a token and return just the two sides.
|
||||
/// See also [Stream::find_map].
|
||||
pub fn find(
|
||||
self,
|
||||
expected: &'static str,
|
||||
@@ -75,6 +86,7 @@ impl<'a> Stream<'a> {
|
||||
Ok((left, right))
|
||||
}
|
||||
|
||||
/// Remove the last item from the stream
|
||||
pub fn pop_back(self) -> ProjectResult<(&'a Entry, Self)> {
|
||||
let Self { data, fallback } = self;
|
||||
let (last, data) = (data.split_last())
|
||||
@@ -91,6 +103,7 @@ impl<'a> Stream<'a> {
|
||||
Self { data, fallback }
|
||||
}
|
||||
|
||||
/// Assert that the stream is empty.
|
||||
pub fn expect_empty(self) -> ProjectResult<()> {
|
||||
if let Some(x) = self.data.first() {
|
||||
Err(ExpectedEOL { location: x.location() }.rc())
|
||||
|
||||
@@ -1,22 +1,32 @@
|
||||
use itertools::Itertools;
|
||||
|
||||
use super::context::Context;
|
||||
#[allow(unused)] // for doc
|
||||
use super::context::LexerPlugin;
|
||||
use super::errors::{BadCodePoint, BadEscapeSequence, NoStringEnd, NotHex};
|
||||
use crate::error::{ProjectError, ProjectResult};
|
||||
use crate::foreign::Atom;
|
||||
use crate::OrcString;
|
||||
|
||||
/// Reasons why [parse_string] might fail. See [StringError]
|
||||
pub enum StringErrorKind {
|
||||
/// A unicode escape sequence wasn't followed by 4 hex digits
|
||||
NotHex,
|
||||
/// A unicode escape sequence contained an unassigned code point
|
||||
BadCodePoint,
|
||||
/// An unrecognized escape sequence was found
|
||||
BadEscSeq,
|
||||
}
|
||||
|
||||
/// Error produced by [parse_string]
|
||||
pub struct StringError {
|
||||
/// Character where the error occurred
|
||||
pos: usize,
|
||||
/// Reason for the error
|
||||
kind: StringErrorKind,
|
||||
}
|
||||
|
||||
/// Process escape sequences in a string literal
|
||||
pub fn parse_string(str: &str) -> Result<String, StringError> {
|
||||
let mut target = String::new();
|
||||
let mut iter = str.char_indices();
|
||||
@@ -65,6 +75,7 @@ pub fn parse_string(str: &str) -> Result<String, StringError> {
|
||||
Ok(target)
|
||||
}
|
||||
|
||||
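Not from the commit, just an illustration of the escape handling parse_string documents; it assumes the common backslash escapes (\n, \\) are among the recognised sequences:

#[test]
fn parse_string_unescapes() {
  // Hypothetical check; the exact escape set supported is an assumption.
  assert_eq!(parse_string(r"hello\nworld").ok(), Some("hello\nworld".to_string()));
  assert_eq!(parse_string(r"a \\ b").ok(), Some("a \\ b".to_string()));
}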
/// [LexerPlugin] for a string literal.
|
||||
pub fn lex_string<'a>(
|
||||
data: &'a str,
|
||||
ctx: &dyn Context,
|
||||
|
||||
@@ -19,7 +19,7 @@ use crate::ProjectTree;
|
||||
/// import pointing to a module in the environment.
|
||||
pub fn parse_layer<'a>(
|
||||
targets: impl Iterator<Item = &'a [Tok<String>]>,
|
||||
loader: &impl Fn(&[Tok<String>]) -> IOResult,
|
||||
loader: &impl Fn(&[Tok<String>], &[Tok<String>]) -> IOResult,
|
||||
environment: &'a ProjectTree<VName>,
|
||||
prelude: &[FileEntry],
|
||||
lexer_plugins: &[&dyn LexerPlugin],
|
||||
|
||||
@@ -29,10 +29,11 @@ pub struct Context<'a> {
|
||||
/// Load the source at the given path or all within if it's a collection,
|
||||
/// and all sources imported from these.
|
||||
fn load_abs_path_rec(
|
||||
referrer: &[Tok<String>],
|
||||
abs_path: &[Tok<String>],
|
||||
mut all: Preparsed,
|
||||
source: &mut LoadedSourceTable,
|
||||
get_source: &impl Fn(&[Tok<String>]) -> IOResult,
|
||||
get_source: &impl Fn(&[Tok<String>], &[Tok<String>]) -> IOResult,
|
||||
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
||||
ctx @ Context { i, lexer_plugins, line_parsers, prelude }: Context,
|
||||
) -> ProjectResult<Preparsed> {
|
||||
@@ -46,7 +47,7 @@ fn load_abs_path_rec(
|
||||
|
||||
// try splitting the path to file, swallowing any IO errors
|
||||
let name_split = split_max_prefix(abs_path, &|p| {
|
||||
get_source(p).map(|l| l.is_code()).unwrap_or(false)
|
||||
get_source(p, referrer).map(|l| l.is_code()).unwrap_or(false)
|
||||
});
|
||||
if let Some((filename, _)) = name_split {
|
||||
// Termination: exit if entry already visited
|
||||
@@ -54,10 +55,10 @@ fn load_abs_path_rec(
|
||||
return Ok(all);
|
||||
}
|
||||
// if the filename is valid, load, preparse and record this file
|
||||
let text = unwrap_or!(get_source(filename)? => Loaded::Code; {
|
||||
let text = unwrap_or!(get_source(filename, referrer)? => Loaded::Code; {
|
||||
return Err(UnexpectedDirectory { path: filename.to_vec() }.rc())
|
||||
});
|
||||
let entries = parse::parse2(ParsingContext::new(
|
||||
let entries = parse::parse_file(ParsingContext::new(
|
||||
i,
|
||||
Arc::new(filename.to_vec()),
|
||||
text,
|
||||
@@ -73,6 +74,7 @@ fn load_abs_path_rec(
|
||||
mut all|
|
||||
-> ProjectResult<_> {
|
||||
let details = unwrap_or!(module.extra.details(); return Ok(all));
|
||||
let referrer = modpath.iter().rev_vec_clone();
|
||||
for import in &details.imports {
|
||||
let origin = &Location::Unknown;
|
||||
let abs_pathv = import_abs_path(
|
||||
@@ -87,6 +89,7 @@ fn load_abs_path_rec(
|
||||
}
|
||||
// recurse on imported module
|
||||
all = load_abs_path_rec(
|
||||
&referrer,
|
||||
&abs_pathv,
|
||||
all,
|
||||
source,
|
||||
@@ -101,7 +104,7 @@ fn load_abs_path_rec(
|
||||
all.0.overlay(preparsed.0).map(Preparsed)
|
||||
} else {
|
||||
// If the path is not within a file, load it as directory
|
||||
let coll = match get_source(abs_path) {
|
||||
let coll = match get_source(abs_path, referrer) {
|
||||
Ok(Loaded::Collection(coll)) => coll,
|
||||
Ok(Loaded::Code(_)) => {
|
||||
unreachable!("split_name returned None but the path is a file")
|
||||
@@ -118,6 +121,7 @@ fn load_abs_path_rec(
|
||||
for item in coll.iter() {
|
||||
let abs_subpath = pushed_ref(abs_path, i.i(item));
|
||||
all = load_abs_path_rec(
|
||||
referrer,
|
||||
&abs_subpath,
|
||||
all,
|
||||
source,
|
||||
@@ -139,7 +143,7 @@ fn load_abs_path_rec(
|
||||
pub fn load_source<'a>(
|
||||
targets: impl Iterator<Item = &'a [Tok<String>]>,
|
||||
ctx: Context,
|
||||
get_source: &impl Fn(&[Tok<String>]) -> IOResult,
|
||||
get_source: &impl Fn(&[Tok<String>], &[Tok<String>]) -> IOResult,
|
||||
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
||||
) -> ProjectResult<(Preparsed, LoadedSourceTable)> {
|
||||
let mut table = LoadedSourceTable::new();
|
||||
@@ -149,6 +153,7 @@ pub fn load_source<'a>(
|
||||
for target in targets {
|
||||
any_target |= true;
|
||||
all = load_abs_path_rec(
|
||||
&[],
|
||||
target,
|
||||
all,
|
||||
&mut table,
|
||||
|
||||
@@ -104,6 +104,8 @@ pub enum PHClass {
|
||||
},
|
||||
/// Matches exactly one token, lambda or parenthesized group
|
||||
Scalar,
|
||||
/// Matches exactly one name
|
||||
Name,
|
||||
}
|
||||
|
||||
/// Properties of a placeholder that matches unknown tokens in macros
|
||||
@@ -120,6 +122,7 @@ impl Display for Placeholder {
|
||||
let name = &self.name;
|
||||
match self.class {
|
||||
PHClass::Scalar => write!(f, "${name}"),
|
||||
PHClass::Name => write!(f, "$_{name}"),
|
||||
PHClass::Vec { nonzero, prio } => {
|
||||
if nonzero { write!(f, "...") } else { write!(f, "..") }?;
|
||||
write!(f, "${name}:{prio}")
|
||||
@@ -128,6 +131,36 @@ impl Display for Placeholder {
|
||||
}
|
||||
}
|
||||
|
||||
/// Different types of brackets supported by Orchid
|
||||
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
|
||||
pub enum PType {
|
||||
/// ()
|
||||
Par,
|
||||
/// []
|
||||
Sqr,
|
||||
/// {}
|
||||
Curl,
|
||||
}
|
||||
impl PType {
|
||||
/// Left paren character for this paren type
|
||||
pub fn l(self) -> char {
|
||||
match self {
|
||||
PType::Curl => '{',
|
||||
PType::Par => '(',
|
||||
PType::Sqr => '[',
|
||||
}
|
||||
}
|
||||
|
||||
/// Right paren character for this paren type
|
||||
pub fn r(self) -> char {
|
||||
match self {
|
||||
PType::Curl => '}',
|
||||
PType::Par => ')',
|
||||
PType::Sqr => ']',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
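An aside, not in the patch: PType replaces the raw `char` tags previously stored in Clause::S, so the opening and closing characters live in one place. A hypothetical inverse of the accessors above, shown only to make the mapping explicit:

fn ptype_of_opening(c: char) -> Option<PType> {
  // Inverse of PType::l; illustrative only, not defined in this commit.
  match c {
    '(' => Some(PType::Par),
    '[' => Some(PType::Sqr),
    '{' => Some(PType::Curl),
    _ => None,
  }
}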
/// An S-expression as read from a source file
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Clause<N: NameLike> {
|
||||
@@ -139,7 +172,7 @@ pub enum Clause<N: NameLike> {
|
||||
Name(N),
|
||||
/// A parenthesized expression
|
||||
/// eg. `(print out "hello")`, `[1, 2, 3]`, `{Some(t) => t}`
|
||||
S(char, Rc<Vec<Expr<N>>>),
|
||||
S(PType, Rc<Vec<Expr<N>>>),
|
||||
/// A function expression, eg. `\x. x + 1`
|
||||
Lambda(Rc<Vec<Expr<N>>>, Rc<Vec<Expr<N>>>),
|
||||
/// A placeholder for macros, eg. `$name`, `...$body`, `...$lhs:1`
|
||||
@@ -159,7 +192,7 @@ impl<N: NameLike> Clause<N> {
|
||||
/// Convert with identical meaning
|
||||
#[must_use]
|
||||
pub fn into_expr(self) -> Expr<N> {
|
||||
if let Self::S('(', body) = &self {
|
||||
if let Self::S(PType::Par, body) = &self {
|
||||
if body.len() == 1 {
|
||||
body[0].clone()
|
||||
} else {
|
||||
@@ -178,7 +211,7 @@ impl<N: NameLike> Clause<N> {
|
||||
} else if exprs.len() == 1 {
|
||||
Some(exprs[0].value.clone())
|
||||
} else {
|
||||
Some(Self::S('(', Rc::new(exprs.to_vec())))
|
||||
Some(Self::S(PType::Par, Rc::new(exprs.to_vec())))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -188,7 +221,7 @@ impl<N: NameLike> Clause<N> {
|
||||
if exprv.len() < 2 {
|
||||
Self::from_exprs(exprv)
|
||||
} else {
|
||||
Some(Self::S('(', exprv.clone()))
|
||||
Some(Self::S(PType::Par, exprv.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -304,6 +337,19 @@ impl<N: NameLike> Clause<N> {
|
||||
Clause::S(_, body) => search_all_slcs(body, f),
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate a parenthesized expression sequence
|
||||
pub fn s(delimiter: char, items: impl IntoIterator<Item = Self>) -> Self {
|
||||
Self::S(
|
||||
match delimiter {
|
||||
'(' => PType::Par,
|
||||
'[' => PType::Sqr,
|
||||
'{' => PType::Curl,
|
||||
_ => panic!("not an opening paren"),
|
||||
},
|
||||
Rc::new(items.into_iter().map(Self::into_expr).collect()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clause<VName> {
|
||||
@@ -333,15 +379,9 @@ impl<N: NameLike> Display for Clause<N> {
|
||||
Self::ExternFn(fun) => write!(f, "{fun:?}"),
|
||||
Self::Atom(a) => write!(f, "{a:?}"),
|
||||
Self::Name(name) => write!(f, "{}", name.to_strv().join("::")),
|
||||
Self::S(del, items) => {
|
||||
Self::S(t, items) => {
|
||||
let body = items.iter().join(" ");
|
||||
let led = match del {
|
||||
'(' => ")",
|
||||
'[' => "]",
|
||||
'{' => "}",
|
||||
_ => "CLOSING_DELIM",
|
||||
};
|
||||
write!(f, "{del}{body}{led}")
|
||||
write!(f, "{}{body}{}", t.l(), t.r())
|
||||
},
|
||||
Self::Lambda(arg, body) => {
|
||||
let args = arg.iter().join(" ");
|
||||
|
||||
@@ -8,7 +8,8 @@ pub type AstError = ast_to_postmacro::Error;
|
||||
#[allow(unused)]
|
||||
pub fn ast_to_interpreted(
|
||||
ast: &ast::Expr<Sym>,
|
||||
symbol: Sym,
|
||||
) -> Result<interpreted::ExprInst, AstError> {
|
||||
let pmtree = ast_to_postmacro::expr(ast)?;
|
||||
let pmtree = ast_to_postmacro::expr(ast, symbol)?;
|
||||
Ok(postmacro_to_interpreted::expr(&pmtree))
|
||||
}
|
||||
|
||||
@@ -2,8 +2,10 @@ use std::rc::Rc;
|
||||
|
||||
use super::location::Location;
|
||||
use super::{ast, postmacro};
|
||||
use crate::ast::PType;
|
||||
use crate::error::ProjectError;
|
||||
use crate::utils::substack::Substack;
|
||||
use crate::utils::unwrap_or;
|
||||
use crate::Sym;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -12,7 +14,7 @@ pub enum ErrorKind {
|
||||
EmptyS,
|
||||
/// Only `(...)` may be converted to typed lambdas. `[...]` and `{...}`
|
||||
/// left in the code are signs of incomplete macro execution
|
||||
BadGroup(char),
|
||||
BadGroup(PType),
|
||||
/// Placeholders shouldn't even occur in the code during macro
|
||||
/// execution. Something is clearly terribly wrong
|
||||
Placeholder,
|
||||
@@ -24,11 +26,12 @@ pub enum ErrorKind {
|
||||
pub struct Error {
|
||||
pub location: Location,
|
||||
pub kind: ErrorKind,
|
||||
pub symbol: Sym,
|
||||
}
|
||||
impl Error {
|
||||
#[must_use]
|
||||
pub fn new(kind: ErrorKind, location: &Location) -> Self {
|
||||
Self { location: location.clone(), kind }
|
||||
pub fn new(kind: ErrorKind, location: &Location, symbol: Sym) -> Self {
|
||||
Self { location: location.clone(), kind, symbol }
|
||||
}
|
||||
}
|
||||
impl ProjectError for Error {
|
||||
@@ -46,22 +49,31 @@ impl ProjectError for Error {
|
||||
}
|
||||
|
||||
fn message(&self) -> String {
|
||||
match self.kind {
|
||||
ErrorKind::BadGroup(char) => format!("{} block found in the code", char),
|
||||
_ => self.description().to_string(),
|
||||
if let ErrorKind::BadGroup(t) = self.kind {
|
||||
let sym = self.symbol.extern_vec().join("::");
|
||||
return format!("{}{} block found in {sym}", t.l(), t.r());
|
||||
}
|
||||
format!(
|
||||
"in {}, {}",
|
||||
self.symbol.extern_vec().join("::"),
|
||||
self.description()
|
||||
)
|
||||
}
|
||||
fn one_position(&self) -> Location { self.location.clone() }
|
||||
}
|
||||
|
||||
/// Try to convert an expression from AST format to typed lambda
|
||||
pub fn expr(expr: &ast::Expr<Sym>) -> Result<postmacro::Expr, Error> {
|
||||
expr_rec(expr, Context::new())
|
||||
pub fn expr(
|
||||
expr: &ast::Expr<Sym>,
|
||||
symbol: Sym,
|
||||
) -> Result<postmacro::Expr, Error> {
|
||||
expr_rec(expr, Context::new(symbol))
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct Context<'a> {
|
||||
names: Substack<'a, Sym>,
|
||||
symbol: Sym,
|
||||
}
|
||||
|
||||
impl<'a> Context<'a> {
|
||||
@@ -70,11 +82,12 @@ impl<'a> Context<'a> {
|
||||
where
|
||||
'a: 'b,
|
||||
{
|
||||
Context { names: self.names.push(name) }
|
||||
Context { names: self.names.push(name), symbol: self.symbol.clone() }
|
||||
}
|
||||
|
||||
}
|
||||
impl Context<'static> {
|
||||
#[must_use]
|
||||
fn new() -> Context<'static> { Context { names: Substack::Bottom } }
|
||||
fn new(symbol: Sym) -> Self { Self { names: Substack::Bottom, symbol } }
|
||||
}
|
||||
|
||||
/// Process an expression sequence
|
||||
@@ -83,8 +96,9 @@ fn exprv_rec<'a>(
|
||||
v: &'a [ast::Expr<Sym>],
|
||||
ctx: Context<'a>,
|
||||
) -> Result<postmacro::Expr, Error> {
|
||||
let (last, rest) =
|
||||
(v.split_last()).ok_or_else(|| Error::new(ErrorKind::EmptyS, location))?;
|
||||
let (last, rest) = unwrap_or! {v.split_last(); {
|
||||
return Err(Error::new(ErrorKind::EmptyS, location, ctx.symbol));
|
||||
}};
|
||||
if rest.is_empty() {
|
||||
return expr_rec(&v[0], ctx);
|
||||
}
|
||||
@@ -99,13 +113,16 @@ fn expr_rec<'a>(
|
||||
ast::Expr { value, location }: &'a ast::Expr<Sym>,
|
||||
ctx: Context<'a>,
|
||||
) -> Result<postmacro::Expr, Error> {
|
||||
if let ast::Clause::S(paren, body) = value {
|
||||
if *paren != '(' {
|
||||
return Err(Error::new(ErrorKind::BadGroup(*paren), location));
|
||||
match value {
|
||||
ast::Clause::S(PType::Par, body) =>
|
||||
return Ok(postmacro::Expr {
|
||||
value: exprv_rec(location, body.as_ref(), ctx)?.value,
|
||||
location: location.clone(),
|
||||
}),
|
||||
ast::Clause::S(paren, _) =>
|
||||
return Err(Error::new(ErrorKind::BadGroup(*paren), location, ctx.symbol)),
|
||||
_ => (),
|
||||
}
|
||||
let expr = exprv_rec(location, body.as_ref(), ctx)?;
|
||||
Ok(postmacro::Expr { value: expr.value, location: location.clone() })
|
||||
} else {
|
||||
let value = match value {
|
||||
ast::Clause::Atom(a) => postmacro::Clause::Atom(a.clone()),
|
||||
ast::Clause::ExternFn(fun) => postmacro::Clause::ExternFn(fun.clone()),
|
||||
@@ -113,8 +130,9 @@ fn expr_rec<'a>(
|
||||
let name = match &arg[..] {
|
||||
[ast::Expr { value: ast::Clause::Name(name), .. }] => name,
|
||||
[ast::Expr { value: ast::Clause::Placeh { .. }, .. }] =>
|
||||
return Err(Error::new(ErrorKind::Placeholder, location)),
|
||||
_ => return Err(Error::new(ErrorKind::InvalidArg, location)),
|
||||
return Err(Error::new(ErrorKind::Placeholder, location, ctx.symbol)),
|
||||
_ =>
|
||||
return Err(Error::new(ErrorKind::InvalidArg, location, ctx.symbol)),
|
||||
};
|
||||
let body_ctx = ctx.w_name(name.clone());
|
||||
let body = exprv_rec(location, b.as_ref(), body_ctx)?;
|
||||
@@ -130,16 +148,12 @@ fn expr_rec<'a>(
|
||||
None => postmacro::Clause::Constant(name.clone()),
|
||||
}
|
||||
},
|
||||
ast::Clause::S(paren, entries) => {
|
||||
if *paren != '(' {
|
||||
return Err(Error::new(ErrorKind::BadGroup(*paren), location));
|
||||
}
|
||||
let expr = exprv_rec(location, entries.as_ref(), ctx)?;
|
||||
expr.value
|
||||
},
|
||||
ast::Clause::S(PType::Par, entries) =>
|
||||
exprv_rec(location, entries.as_ref(), ctx)?.value,
|
||||
ast::Clause::S(paren, _) =>
|
||||
return Err(Error::new(ErrorKind::BadGroup(*paren), location, ctx.symbol)),
|
||||
ast::Clause::Placeh { .. } =>
|
||||
return Err(Error::new(ErrorKind::Placeholder, location)),
|
||||
return Err(Error::new(ErrorKind::Placeholder, location, ctx.symbol)),
|
||||
};
|
||||
Ok(postmacro::Expr { value, location: location.clone() })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
//! functions have to define
|
||||
use std::fmt::{Debug, Display};
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::rc::Rc;
|
||||
use std::sync::{Arc, Mutex, TryLockError};
|
||||
|
||||
#[allow(unused)] // for doc
|
||||
@@ -13,7 +12,7 @@ use super::location::Location;
|
||||
use super::path_set::PathSet;
|
||||
#[allow(unused)] // for doc
|
||||
use crate::foreign::Atomic;
|
||||
use crate::foreign::{Atom, ExFn, ExternError};
|
||||
use crate::foreign::{Atom, ExFn, XfnResult};
|
||||
use crate::utils::ddispatch::request;
|
||||
use crate::utils::take_with_output;
|
||||
use crate::Sym;
|
||||
@@ -53,11 +52,11 @@ pub struct NotALiteral;
|
||||
/// Types automatically convertible from an [ExprInst]
|
||||
pub trait TryFromExprInst: Sized {
|
||||
/// Match and clone the value out of an [ExprInst]
|
||||
fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>>;
|
||||
fn from_exi(exi: ExprInst) -> XfnResult<Self>;
|
||||
}
|
||||
|
||||
impl TryFromExprInst for ExprInst {
|
||||
fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> { Ok(exi) }
|
||||
fn from_exi(exi: ExprInst) -> XfnResult<Self> { Ok(exi) }
|
||||
}
|
||||
|
||||
/// A wrapper around expressions to handle their multiple occurrences in
|
||||
@@ -162,7 +161,7 @@ impl ExprInst {
|
||||
/// Convert into any type that implements [TryFromExprInst]. Calls to this
|
||||
/// function are generated wherever a conversion is elided in an extern
|
||||
/// function.
|
||||
pub fn downcast<T: TryFromExprInst>(self) -> Result<T, Rc<dyn ExternError>> {
|
||||
pub fn downcast<T: TryFromExprInst>(self) -> XfnResult<T> {
|
||||
T::from_exi(self)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
//! Building blocks of a source file
|
||||
use std::fmt::Display;
|
||||
use std::iter;
|
||||
|
||||
use itertools::{Either, Itertools};
|
||||
|
||||
@@ -9,7 +8,7 @@ use crate::ast::{Constant, Rule};
|
||||
use crate::error::{ProjectError, ProjectResult, TooManySupers};
|
||||
use crate::interner::{Interner, Tok};
|
||||
use crate::utils::pure_seq::pushed;
|
||||
use crate::utils::{unwrap_or, BoxedIter};
|
||||
use crate::utils::BoxedIter;
|
||||
use crate::Location;
|
||||
|
||||
/// An import pointing at another module, either specifying the symbol to be
|
||||
@@ -79,6 +78,12 @@ pub enum MemberKind {
|
||||
/// A prefixed set of other entries
|
||||
Module(ModuleBlock),
|
||||
}
|
||||
impl MemberKind {
|
||||
/// Convert to [FileEntry]
|
||||
pub fn to_entry(self, exported: bool, location: Location) -> FileEntry {
|
||||
FileEntryKind::Member(Member { exported, kind: self }).wrap(location)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for MemberKind {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
@@ -122,6 +127,12 @@ pub enum FileEntryKind {
|
||||
/// tokens that the local module doesn't actually define a role for
|
||||
Export(Vec<(Tok<String>, Location)>),
|
||||
}
|
||||
impl FileEntryKind {
|
||||
/// Wrap with no location
|
||||
pub fn wrap(self, location: Location) -> FileEntry {
|
||||
FileEntry { kind: self, locations: vec![location] }
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for FileEntryKind {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
@@ -239,25 +250,22 @@ pub fn absolute_path(
|
||||
|
||||
#[must_use = "this could be None which means that there are too many supers"]
|
||||
fn absolute_path_rec(
|
||||
abs_location: &[Tok<String>],
|
||||
rel_path: &[Tok<String>],
|
||||
mut abs_location: &[Tok<String>],
|
||||
mut rel_path: &[Tok<String>],
|
||||
i: &Interner,
|
||||
) -> Option<VName> {
|
||||
let (head, tail) = unwrap_or!(rel_path.split_first();
|
||||
return Some(vec![])
|
||||
);
|
||||
if *head == i.i("super") {
|
||||
let (_, new_abs) = abs_location.split_last()?;
|
||||
if tail.is_empty() {
|
||||
Some(new_abs.to_vec())
|
||||
} else {
|
||||
let new_rel =
|
||||
iter::once(i.i("self")).chain(tail.iter().cloned()).collect::<Vec<_>>();
|
||||
absolute_path_rec(new_abs, &new_rel, i)
|
||||
let mut relative = false;
|
||||
while rel_path.first() == Some(&i.i("super")) {
|
||||
abs_location = abs_location.split_last()?.1;
|
||||
rel_path = rel_path.split_first().expect("checked above").1;
|
||||
relative = true;
|
||||
}
|
||||
} else if *head == i.i("self") {
|
||||
Some(abs_location.iter().chain(tail.iter()).cloned().collect())
|
||||
} else {
|
||||
Some(rel_path.to_vec())
|
||||
if rel_path.first() == Some(&i.i("self")) {
|
||||
relative = true;
|
||||
rel_path = rel_path.split_first().expect("checked above").1;
|
||||
}
|
||||
match relative {
|
||||
true => Some(abs_location.iter().chain(rel_path).cloned().collect()),
|
||||
false => Some(rel_path.to_vec()),
|
||||
}
|
||||
}
|
||||
|
||||
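A rough model of what the rewritten loop above computes, outside the diff and with plain strings standing in for interned tokens: leading `super` segments trim the current location, a leading `self` anchors the path, and anything else is taken as already absolute.

fn resolve(mut loc: &[&str], mut rel: &[&str]) -> Option<Vec<String>> {
  let mut relative = false;
  // Strip one enclosing module per leading `super`.
  while rel.first() == Some(&"super") {
    loc = loc.split_last()?.1;
    rel = &rel[1..];
    relative = true;
  }
  // A leading `self` also anchors the path to the current module.
  if rel.first() == Some(&"self") {
    relative = true;
    rel = &rel[1..];
  }
  let base: &[&str] = if relative { loc } else { &[] };
  Some(base.iter().chain(rel).map(|s| s.to_string()).collect())
}

// resolve(&["a", "b", "c"], &["super", "super", "x"]) yields Some(["a", "x"]).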
@@ -14,5 +14,9 @@ pub trait Matcher {
|
||||
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self;
|
||||
/// Apply matcher to a token sequence
|
||||
#[must_use]
|
||||
fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>>;
|
||||
fn apply<'a>(
|
||||
&self,
|
||||
source: &'a [RuleExpr],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<State<'a>>;
|
||||
}
|
||||
|
||||
@@ -3,24 +3,27 @@ use super::shared::AnyMatcher;
|
||||
use super::vec_match::vec_match;
|
||||
use crate::rule::matcher::RuleExpr;
|
||||
use crate::rule::state::State;
|
||||
use crate::Sym;
|
||||
|
||||
#[must_use]
|
||||
pub fn any_match<'a>(
|
||||
matcher: &AnyMatcher,
|
||||
seq: &'a [RuleExpr],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<State<'a>> {
|
||||
match matcher {
|
||||
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq),
|
||||
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq, save_loc),
|
||||
AnyMatcher::Vec { left, mid, right } => {
|
||||
if seq.len() < left.len() + right.len() {
|
||||
return None;
|
||||
};
|
||||
let left_split = left.len();
|
||||
let right_split = seq.len() - right.len();
|
||||
let mut state = scalv_match(left, &seq[..left_split])?;
|
||||
state.extend(scalv_match(right, &seq[right_split..])?);
|
||||
state.extend(vec_match(mid, &seq[left_split..right_split])?);
|
||||
Some(state)
|
||||
Some(
|
||||
scalv_match(left, &seq[..left_split], save_loc)?
|
||||
.combine(scalv_match(right, &seq[right_split..], save_loc)?)
|
||||
.combine(vec_match(mid, &seq[left_split..right_split], save_loc)?),
|
||||
)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -110,12 +110,14 @@ fn mk_scalar(pattern: &RuleExpr) -> ScalMatcher {
|
||||
Clause::Atom(a) => ScalMatcher::Atom(a.clone()),
|
||||
Clause::ExternFn(_) => panic!("Cannot match on ExternFn"),
|
||||
Clause::Name(n) => ScalMatcher::Name(n.clone()),
|
||||
Clause::Placeh(Placeholder { name, class }) => {
|
||||
debug_assert!(
|
||||
!matches!(class, PHClass::Vec { .. }),
|
||||
"Scalar matcher cannot be built from vector pattern"
|
||||
);
|
||||
ScalMatcher::Placeh(name.clone())
|
||||
Clause::Placeh(Placeholder { name, class }) => match class {
|
||||
PHClass::Vec { .. } => {
|
||||
panic!("Scalar matcher cannot be built from vector pattern")
|
||||
},
|
||||
PHClass::Scalar | PHClass::Name => ScalMatcher::Placeh {
|
||||
key: name.clone(),
|
||||
name_only: class == &PHClass::Name,
|
||||
},
|
||||
},
|
||||
Clause::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))),
|
||||
Clause::Lambda(arg, body) =>
|
||||
@@ -128,7 +130,7 @@ mod test {
|
||||
use std::rc::Rc;
|
||||
|
||||
use super::mk_any;
|
||||
use crate::ast::{Clause, PHClass, Placeholder};
|
||||
use crate::ast::{Clause, PHClass, PType, Placeholder};
|
||||
use crate::interner::Interner;
|
||||
|
||||
#[test]
|
||||
@@ -142,7 +144,7 @@ mod test {
|
||||
.into_expr(),
|
||||
Clause::Name(i.i(&[i.i("prelude"), i.i("do")][..])).into_expr(),
|
||||
Clause::S(
|
||||
'(',
|
||||
PType::Par,
|
||||
Rc::new(vec![
|
||||
Clause::Placeh(Placeholder {
|
||||
class: PHClass::Vec { nonzero: false, prio: 0 },
|
||||
|
||||
@@ -3,25 +3,32 @@ use super::shared::ScalMatcher;
|
||||
use crate::ast::Clause;
|
||||
use crate::rule::matcher::RuleExpr;
|
||||
use crate::rule::state::{State, StateEntry};
|
||||
use crate::Sym;
|
||||
|
||||
#[must_use]
|
||||
pub fn scal_match<'a>(
|
||||
matcher: &ScalMatcher,
|
||||
expr: &'a RuleExpr,
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<State<'a>> {
|
||||
match (matcher, &expr.value) {
|
||||
(ScalMatcher::Atom(a1), Clause::Atom(a2)) if a1.0.strict_eq(&a2.0) =>
|
||||
Some(State::new()),
|
||||
(ScalMatcher::Name(n1), Clause::Name(n2)) if n1 == n2 => Some(State::new()),
|
||||
(ScalMatcher::Placeh(key), _) =>
|
||||
Some(State::from([(key.clone(), StateEntry::Scalar(expr))])),
|
||||
Some(State::default()),
|
||||
(ScalMatcher::Name(n1), Clause::Name(n2)) if n1 == n2 =>
|
||||
Some(match save_loc(n1.clone()) {
|
||||
true => State::from_name(n1.clone(), expr.location.clone()),
|
||||
false => State::default(),
|
||||
}),
|
||||
(ScalMatcher::Placeh { key, name_only: true }, Clause::Name(n)) =>
|
||||
Some(State::from_ph(key.clone(), StateEntry::Name(n, &expr.location))),
|
||||
(ScalMatcher::Placeh { key, name_only: false }, _) =>
|
||||
Some(State::from_ph(key.clone(), StateEntry::Scalar(expr))),
|
||||
(ScalMatcher::S(c1, b_mat), Clause::S(c2, body)) if c1 == c2 =>
|
||||
any_match(b_mat, &body[..]),
|
||||
(ScalMatcher::Lambda(arg_mat, b_mat), Clause::Lambda(arg, body)) => {
|
||||
let mut state = any_match(arg_mat, arg)?;
|
||||
state.extend(any_match(b_mat, body)?);
|
||||
Some(state)
|
||||
},
|
||||
any_match(b_mat, &body[..], save_loc),
|
||||
(ScalMatcher::Lambda(arg_mat, b_mat), Clause::Lambda(arg, body)) => Some(
|
||||
any_match(arg_mat, arg, save_loc)?
|
||||
.combine(any_match(b_mat, body, save_loc)?),
|
||||
),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -30,13 +37,14 @@ pub fn scal_match<'a>(
|
||||
pub fn scalv_match<'a>(
|
||||
matchers: &[ScalMatcher],
|
||||
seq: &'a [RuleExpr],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<State<'a>> {
|
||||
if seq.len() != matchers.len() {
|
||||
return None;
|
||||
}
|
||||
let mut state = State::new();
|
||||
let mut state = State::default();
|
||||
for (matcher, expr) in matchers.iter().zip(seq.iter()) {
|
||||
state.extend(scal_match(matcher, expr)?);
|
||||
state = state.combine(scal_match(matcher, expr, save_loc)?);
|
||||
}
|
||||
Some(state)
|
||||
}
|
||||
|
||||
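Sketch only, not part of the diff: the save_loc parameter threads a caller-supplied predicate through the matchers so that only interesting names have their locations recorded. A hypothetical driver using a std HashSet:

use std::collections::HashSet;

fn match_with_tracking<'a>(
  matcher: &AnyMatcher,
  source: &'a [RuleExpr],
  tracked: &HashSet<Sym>,
) -> Option<State<'a>> {
  // Record locations only for the names the caller cares about.
  any_match(matcher, source, &|sym| tracked.contains(&sym))
}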
@@ -5,6 +5,7 @@ use itertools::Itertools;
|
||||
|
||||
use super::any_match::any_match;
|
||||
use super::build::mk_any;
|
||||
use crate::ast::PType;
|
||||
use crate::foreign::Atom;
|
||||
use crate::interner::Tok;
|
||||
use crate::rule::matcher::{Matcher, RuleExpr};
|
||||
@@ -15,9 +16,9 @@ use crate::{Sym, VName};
|
||||
pub enum ScalMatcher {
|
||||
Atom(Atom),
|
||||
Name(Sym),
|
||||
S(char, Box<AnyMatcher>),
|
||||
S(PType, Box<AnyMatcher>),
|
||||
Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
|
||||
Placeh(Tok<String>),
|
||||
Placeh { key: Tok<String>, name_only: bool },
|
||||
}
|
||||
|
||||
pub enum VecMatcher {
|
||||
@@ -58,8 +59,12 @@ pub enum AnyMatcher {
|
||||
impl Matcher for AnyMatcher {
|
||||
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self { mk_any(&pattern) }
|
||||
|
||||
fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>> {
|
||||
any_match(self, source)
|
||||
fn apply<'a>(
|
||||
&self,
|
||||
source: &'a [RuleExpr],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<State<'a>> {
|
||||
any_match(self, source, save_loc)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -69,20 +74,13 @@ impl Display for ScalMatcher {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Atom(a) => write!(f, "{a:?}"),
|
||||
Self::Placeh(n) => write!(f, "${n}"),
|
||||
Self::Placeh { key, name_only } => match name_only {
|
||||
false => write!(f, "${key}"),
|
||||
true => write!(f, "$_{key}"),
|
||||
},
|
||||
Self::Name(n) => write!(f, "{}", n.extern_vec().join("::")),
|
||||
Self::S(c, body) => {
|
||||
let pair = match c {
|
||||
'(' => ')',
|
||||
'[' => ']',
|
||||
'{' => '}',
|
||||
_ => unreachable!(),
|
||||
};
|
||||
write!(f, "{c}{body}{pair}")
|
||||
},
|
||||
Self::Lambda(arg, body) => {
|
||||
write!(f, "\\{arg}.{body}")
|
||||
},
|
||||
Self::S(t, body) => write!(f, "{}{body}{}", t.l(), t.r()),
|
||||
Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -136,8 +134,12 @@ pub struct VectreeMatcher(AnyMatcher);
|
||||
impl Matcher for VectreeMatcher {
|
||||
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self { Self(AnyMatcher::new(pattern)) }
|
||||
|
||||
fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>> {
|
||||
self.0.apply(source)
|
||||
fn apply<'a>(
|
||||
&self,
|
||||
source: &'a [RuleExpr],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<State<'a>> {
|
||||
self.0.apply(source, save_loc)
|
||||
}
|
||||
}
|
||||
impl Display for VectreeMatcher {
|
||||
|
||||
@@ -6,19 +6,20 @@ use super::scal_match::scalv_match;
|
||||
use super::shared::VecMatcher;
|
||||
use crate::rule::matcher::RuleExpr;
|
||||
use crate::rule::state::{State, StateEntry};
|
||||
use crate::utils::unwrap_or;
|
||||
use crate::Sym;
|
||||
|
||||
#[must_use]
|
||||
pub fn vec_match<'a>(
|
||||
matcher: &VecMatcher,
|
||||
seq: &'a [RuleExpr],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<State<'a>> {
|
||||
match matcher {
|
||||
VecMatcher::Placeh { key, nonzero } => {
|
||||
if *nonzero && seq.is_empty() {
|
||||
return None;
|
||||
}
|
||||
return Some(State::from([(key.clone(), StateEntry::Vec(seq))]));
|
||||
return Some(State::from_ph(key.clone(), StateEntry::Vec(seq)));
|
||||
},
|
||||
VecMatcher::Scan { left, sep, right, direction } => {
|
||||
if seq.len() < sep.len() {
|
||||
@@ -26,10 +27,16 @@ pub fn vec_match<'a>(
|
||||
}
|
||||
for lpos in direction.walk(0..=seq.len() - sep.len()) {
|
||||
let rpos = lpos + sep.len();
|
||||
let mut state = unwrap_or!(vec_match(left, &seq[..lpos]); continue);
|
||||
state.extend(unwrap_or!(scalv_match(sep, &seq[lpos..rpos]); continue));
|
||||
state.extend(unwrap_or!(vec_match(right, &seq[rpos..]); continue));
|
||||
return Some(state);
|
||||
let state = vec_match(left, &seq[..lpos], save_loc)
|
||||
.and_then(|s| {
|
||||
Some(s.combine(scalv_match(sep, &seq[lpos..rpos], save_loc)?))
|
||||
})
|
||||
.and_then(|s| {
|
||||
Some(s.combine(vec_match(right, &seq[rpos..], save_loc)?))
|
||||
});
|
||||
if let Some(s) = state {
|
||||
return Some(s);
|
||||
}
|
||||
}
|
||||
None
|
||||
},
|
||||
@@ -42,14 +49,16 @@ pub fn vec_match<'a>(
|
||||
let lposv = seq[..seq.len() - right_sep.len()]
|
||||
.windows(left_sep.len())
|
||||
.enumerate()
|
||||
.filter_map(|(i, window)| scalv_match(left_sep, window).map(|s| (i, s)))
|
||||
.filter_map(|(i, window)| {
|
||||
scalv_match(left_sep, window, save_loc).map(|s| (i, s))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
// Valid locations for the right separator
|
||||
let rposv = seq[left_sep.len()..]
|
||||
.windows(right_sep.len())
|
||||
.enumerate()
|
||||
.filter_map(|(i, window)| {
|
||||
scalv_match(right_sep, window).map(|s| (i, s))
|
||||
scalv_match(right_sep, window, save_loc).map(|s| (i, s))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
// Valid combinations of locations for the separators
|
||||
@@ -57,9 +66,8 @@ pub fn vec_match<'a>(
|
||||
.into_iter()
|
||||
.cartesian_product(rposv)
|
||||
.filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
|
||||
.map(|((lpos, mut lstate), (rpos, rstate))| {
|
||||
lstate.extend(rstate);
|
||||
(lpos, rpos, lstate)
|
||||
.map(|((lpos, lstate), (rpos, rstate))| {
|
||||
(lpos, rpos, lstate.combine(rstate))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
// In descending order of size
|
||||
@@ -68,25 +76,29 @@ pub fn vec_match<'a>(
|
||||
for (_gap_size, cluster) in eql_clusters.into_iter() {
|
||||
let best_candidate = cluster
|
||||
.into_iter()
|
||||
.filter_map(|(lpos, rpos, mut state)| {
|
||||
state.extend(vec_match(left, &seq[..lpos])?);
|
||||
state.extend(vec_match(mid, &seq[lpos + left_sep.len()..rpos])?);
|
||||
state.extend(vec_match(right, &seq[rpos + right_sep.len()..])?);
|
||||
Some(state)
|
||||
.filter_map(|(lpos, rpos, state)| {
|
||||
Some(
|
||||
state
|
||||
.combine(vec_match(left, &seq[..lpos], save_loc)?)
|
||||
.combine(vec_match(
|
||||
mid,
|
||||
&seq[lpos + left_sep.len()..rpos],
|
||||
save_loc,
|
||||
)?)
|
||||
.combine(vec_match(
|
||||
right,
|
||||
&seq[rpos + right_sep.len()..],
|
||||
save_loc,
|
||||
)?),
|
||||
)
|
||||
})
|
||||
.max_by(|a, b| {
|
||||
for key in key_order {
|
||||
let aslc = if let Some(StateEntry::Vec(s)) = a.get(key) {
|
||||
s
|
||||
} else {
|
||||
panic!("key_order references scalar or missing")
|
||||
};
|
||||
let bslc = if let Some(StateEntry::Vec(s)) = b.get(key) {
|
||||
s
|
||||
} else {
|
||||
panic!("key_order references scalar or missing")
|
||||
};
|
||||
match aslc.len().cmp(&bslc.len()) {
|
||||
let alen =
|
||||
a.ph_len(key).expect("key_order references scalar or missing");
|
||||
let blen =
|
||||
b.ph_len(key).expect("key_order references scalar or missing");
|
||||
match alen.cmp(&blen) {
|
||||
Ordering::Equal => (),
|
||||
any => return any,
|
||||
}
|
||||
|
||||
@@ -43,6 +43,7 @@ fn pad(mut rule: Rule<Sym>, i: &Interner) -> Rule<Sym> {
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
enum PHType {
|
||||
Scalar,
|
||||
Name,
|
||||
Vec { nonzero: bool },
|
||||
}
|
||||
impl From<PHClass> for PHType {
|
||||
@@ -50,6 +51,7 @@ impl From<PHClass> for PHType {
|
||||
match value {
|
||||
PHClass::Scalar => Self::Scalar,
|
||||
PHClass::Vec { nonzero, .. } => Self::Vec { nonzero },
|
||||
PHClass::Name => Self::Name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,13 +18,20 @@ use crate::Sym;
|
||||
pub struct CachedRule<M: Matcher> {
|
||||
matcher: M,
|
||||
pattern: Vec<RuleExpr>,
|
||||
pat_glossary: HashSet<Sym>,
|
||||
template: Vec<RuleExpr>,
|
||||
save_location: HashSet<Sym>,
|
||||
}
|
||||
|
||||
impl<M: Display + Matcher> Display for CachedRule<M> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let patterns = self.pattern.iter().join(" ");
|
||||
write!(f, "{patterns} is matched by {}", self.matcher)
|
||||
write!(
|
||||
f,
|
||||
"{patterns} is matched by {} and generates {}",
|
||||
self.matcher,
|
||||
self.template.iter().map(|e| e.to_string()).join(" ")
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,7 +43,7 @@ impl<M: Display + Matcher> Display for CachedRule<M> {
|
||||
///
|
||||
/// If you don't know what to put in the generic parameter, use [Repo]
|
||||
pub struct Repository<M: Matcher> {
|
||||
cache: Vec<(CachedRule<M>, HashSet<Sym>, NotNan<f64>)>,
|
||||
cache: Vec<(CachedRule<M>, NotNan<f64>)>,
|
||||
}
|
||||
impl<M: Matcher> Repository<M> {
|
||||
/// Build a new repository to hold the given set of rules
|
||||
@@ -48,19 +55,27 @@ impl<M: Matcher> Repository<M> {
|
||||
let cache = rules
|
||||
.into_iter()
|
||||
.map(|r| {
|
||||
let prio = r.prio;
|
||||
let rule = prepare_rule(r.clone(), i).map_err(|e| (r, e))?;
|
||||
let mut glossary = HashSet::new();
|
||||
for e in rule.pattern.iter() {
|
||||
glossary.extend(e.value.collect_names().into_iter());
|
||||
}
|
||||
let matcher = M::new(Rc::new(rule.pattern.clone()));
|
||||
let Rule { pattern, prio, template } =
|
||||
prepare_rule(r.clone(), i).map_err(|e| (r, e))?;
|
||||
let mut pat_glossary = HashSet::new();
|
||||
pat_glossary.extend(
|
||||
pattern.iter().flat_map(|e| e.value.collect_names().into_iter()),
|
||||
);
|
||||
let mut tpl_glossary = HashSet::new();
|
||||
tpl_glossary.extend(
|
||||
template.iter().flat_map(|e| e.value.collect_names().into_iter()),
|
||||
);
|
||||
let save_location =
|
||||
pat_glossary.intersection(&tpl_glossary).cloned().collect();
|
||||
let matcher = M::new(Rc::new(pattern.clone()));
|
||||
let prep = CachedRule {
|
||||
matcher,
|
||||
pattern: rule.pattern,
|
||||
template: rule.template,
|
||||
pattern,
|
||||
template,
|
||||
pat_glossary,
|
||||
save_location,
|
||||
};
|
||||
Ok((prep, glossary, prio))
|
||||
Ok((prep, prio))
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
Ok(Self { cache })
|
||||
@@ -70,12 +85,13 @@ impl<M: Matcher> Repository<M> {
|
||||
#[must_use]
|
||||
pub fn step(&self, code: &RuleExpr) -> Option<RuleExpr> {
|
||||
let glossary = code.value.collect_names();
|
||||
for (rule, deps, _) in self.cache.iter() {
|
||||
if !deps.is_subset(&glossary) {
|
||||
for (rule, _) in self.cache.iter() {
|
||||
if !rule.pat_glossary.is_subset(&glossary) {
|
||||
continue;
|
||||
}
|
||||
let product = update_first_seq::expr(code, &mut |exprv| {
|
||||
let state = rule.matcher.apply(exprv.as_slice())?;
|
||||
let save_loc = |n| rule.save_location.contains(&n);
|
||||
let state = rule.matcher.apply(exprv.as_slice(), &save_loc)?;
|
||||
let result = apply_exprv(&rule.template, &state);
|
||||
Some(Rc::new(result))
|
||||
});
|
||||
@@ -142,9 +158,10 @@ impl<M: Debug + Matcher> Debug for Repository<M> {
|
||||
impl<M: Display + Matcher> Display for Repository<M> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
writeln!(f, "Repository[")?;
|
||||
for (rule, deps, p) in self.cache.iter() {
|
||||
for (rule, p) in self.cache.iter() {
|
||||
let prio = print_nat16(*p);
|
||||
let deps = deps.iter().map(|t| t.extern_vec().join("::")).join(", ");
|
||||
let deps =
|
||||
rule.pat_glossary.iter().map(|t| t.extern_vec().join("::")).join(", ");
|
||||
writeln!(f, " priority: {prio}\tdependencies: [{deps}]")?;
|
||||
writeln!(f, " {rule}")?;
|
||||
}
|
||||
|
||||
@@ -180,6 +180,7 @@ impl ProjectError for ArityMismatch {
|
||||
"This instance represents ".to_string()
|
||||
+ match class {
|
||||
ast::PHClass::Scalar => "one clause",
|
||||
ast::PHClass::Name => "one name",
|
||||
ast::PHClass::Vec { nonzero: true, .. } => "one or more clauses",
|
||||
ast::PHClass::Vec { nonzero: false, .. } =>
|
||||
"any number of clauses",
|
||||
|
||||
@@ -1,18 +1,72 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use itertools::{EitherOrBoth, Itertools};
|
||||
|
||||
use super::matcher::RuleExpr;
|
||||
use crate::ast::{Clause, Expr, PHClass, Placeholder};
|
||||
use crate::interner::Tok;
|
||||
use crate::utils::unwrap_or;
|
||||
use crate::{Location, Sym};
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum StateEntry<'a> {
|
||||
Vec(&'a [RuleExpr]),
|
||||
Scalar(&'a RuleExpr),
|
||||
Name(&'a Sym, &'a Location),
|
||||
}
|
||||
#[derive(Clone)]
|
||||
pub struct State<'a> {
|
||||
placeholders: HashMap<Tok<String>, StateEntry<'a>>,
|
||||
name_locations: HashMap<Sym, Vec<Location>>,
|
||||
}
|
||||
impl<'a> State<'a> {
|
||||
pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self {
|
||||
Self {
|
||||
placeholders: HashMap::from([(key, entry)]),
|
||||
name_locations: HashMap::new(),
|
||||
}
|
||||
}
|
||||
pub fn combine(self, s: Self) -> Self {
|
||||
Self {
|
||||
placeholders: self
|
||||
.placeholders
|
||||
.into_iter()
|
||||
.chain(s.placeholders)
|
||||
.collect(),
|
||||
name_locations: (self.name_locations.into_iter())
|
||||
.sorted_unstable_by_key(|(k, _)| k.id())
|
||||
.merge_join_by(
|
||||
(s.name_locations.into_iter())
|
||||
.sorted_unstable_by_key(|(k, _)| k.id()),
|
||||
|(k, _), (k2, _)| k.id().cmp(&k2.id()),
|
||||
)
|
||||
.map(|ent| match ent {
|
||||
EitherOrBoth::Left(i) | EitherOrBoth::Right(i) => i,
|
||||
EitherOrBoth::Both((k, l), (_, r)) =>
|
||||
(k, l.into_iter().chain(r).collect()),
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> {
|
||||
match self.placeholders.get(key)? {
|
||||
StateEntry::Vec(slc) => Some(slc.len()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
pub fn from_name(name: Sym, location: Location) -> Self {
|
||||
Self {
|
||||
name_locations: HashMap::from([(name, vec![location])]),
|
||||
placeholders: HashMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Default for State<'static> {
|
||||
fn default() -> Self {
|
||||
Self { name_locations: HashMap::new(), placeholders: HashMap::new() }
|
||||
}
|
||||
}
|
||||
pub type State<'a> = HashMap<Tok<String>, StateEntry<'a>>;
|
||||
|
||||
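An illustrative composition of the new State API above, not in the commit: bindings from separate sub-matches are merged with combine rather than HashMap::extend, so recorded name locations from both sides are preserved.

fn bind_scalar_and_name<'a>(
  key: Tok<String>,
  value: &'a RuleExpr,
  name: Sym,
  location: Location,
) -> State<'a> {
  // One placeholder binding plus one saved name location, merged into one state.
  State::from_ph(key, StateEntry::Scalar(value))
    .combine(State::from_name(name, location))
}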
#[must_use]
|
||||
pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec<RuleExpr> {
|
||||
@@ -35,12 +89,15 @@ pub fn apply_expr(template: &RuleExpr, state: &State) -> Vec<RuleExpr> {
|
||||
value: Clause::S(*c, Rc::new(apply_exprv(body.as_slice(), state))),
|
||||
}],
|
||||
Clause::Placeh(Placeholder { name, class }) => {
|
||||
let value = *unwrap_or!(state.get(name);
|
||||
let value = *unwrap_or!(state.placeholders.get(name);
|
||||
panic!("Placeholder does not have a value in state")
|
||||
);
|
||||
match (class, value) {
|
||||
(PHClass::Scalar, StateEntry::Scalar(item)) => vec![item.clone()],
|
||||
(PHClass::Vec { .. }, StateEntry::Vec(chunk)) => chunk.to_vec(),
|
||||
(PHClass::Name, StateEntry::Name(n, l)) => {
|
||||
vec![RuleExpr { value: Clause::Name(n.clone()), location: l.clone() }]
|
||||
},
|
||||
_ => panic!("Type mismatch between template and state"),
|
||||
}
|
||||
},
|
||||
|
||||
@@ -55,6 +55,7 @@ impl InertAtomic for Yield {
|
||||
|
||||
/// Error indicating a yield command when all event producers and timers had
|
||||
/// exited
|
||||
#[derive(Clone)]
|
||||
pub struct InfiniteBlock;
|
||||
impl ExternError for InfiniteBlock {}
|
||||
impl Display for InfiniteBlock {
|
||||
@@ -187,8 +188,8 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
|
||||
});
|
||||
System {
|
||||
name: vec!["system".to_string(), "asynch".to_string()],
|
||||
lexer_plugin: None,
|
||||
line_parser: None,
|
||||
lexer_plugins: vec![],
|
||||
line_parsers: vec![],
|
||||
constants: ConstTree::namespace(
|
||||
[i.i("system"), i.i("async")],
|
||||
ConstTree::tree([
|
||||
|
||||
@@ -183,8 +183,8 @@ impl IntoSystem<'static> for DirectFS {
|
||||
name: ["system", "directfs"].into_iter().map_into().collect(),
|
||||
code: HashMap::new(),
|
||||
prelude: Vec::new(),
|
||||
lexer_plugin: None,
|
||||
line_parser: None,
|
||||
lexer_plugins: vec![],
|
||||
line_parsers: vec![],
|
||||
constants: ConstTree::namespace(
|
||||
[i.i("system"), i.i("fs")],
|
||||
ConstTree::tree([
|
||||
|
||||
@@ -113,8 +113,8 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
|
||||
name: None,
|
||||
}]),
|
||||
}],
|
||||
lexer_plugin: None,
|
||||
line_parser: None,
|
||||
lexer_plugins: vec![],
|
||||
line_parsers: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,3 +5,5 @@ pub mod directfs;
|
||||
pub mod io;
|
||||
pub mod scheduler;
|
||||
pub mod stl;
|
||||
pub mod parse_custom_line;
|
||||
|
||||
|
||||
40
src/systems/parse_custom_line.rs
Normal file
@@ -0,0 +1,40 @@
|
||||
//! A helper for defining custom lines. See [custom_line]
|
||||
use crate::error::{ProjectError, ProjectResult};
|
||||
use crate::parse::errors::{Expected, ExpectedName};
|
||||
use crate::parse::{Entry, Lexeme, Stream};
|
||||
use crate::{Location, Tok};
|
||||
|
||||
/// An exported line with a name for which the line parser denies exports
|
||||
pub struct Unexportable(Entry);
|
||||
impl ProjectError for Unexportable {
|
||||
fn description(&self) -> &str { "this line type cannot be exported" }
|
||||
fn message(&self) -> String { format!("{} cannot be exported", &self.0) }
|
||||
fn one_position(&self) -> Location { self.0.location() }
|
||||
}
|
||||
|
||||
/// Parse a line identified by the specified leading keyword. Although not
|
||||
/// required, plugins are encouraged to prefix their lines with a globally
|
||||
/// unique keyword which makes or breaks their parsing, to avoid accidental
|
||||
/// failure to recognize their lines
|
||||
pub fn custom_line(
|
||||
tail: Stream<'_>,
|
||||
keyword: Tok<String>,
|
||||
exportable: bool,
|
||||
) -> Option<ProjectResult<(bool, Stream<'_>, Location)>> {
|
||||
let line_loc = tail.location();
|
||||
let (fst, tail) = tail.pop().ok()?;
|
||||
let fst_name = ExpectedName::expect(fst).ok()?;
|
||||
let (exported, n_ent, tail) = if fst_name == keyword {
|
||||
(false, fst, tail.trim())
|
||||
} else if fst_name.as_str() == "export" {
|
||||
let (snd, tail) = tail.pop().ok()?;
|
||||
Expected::expect(Lexeme::Name(keyword), snd).ok()?;
|
||||
(true, snd, tail.trim())
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
Some(match exported && !exportable {
|
||||
true => Err(Unexportable(n_ent.clone()).rc()),
|
||||
false => Ok((exported, tail, line_loc)),
|
||||
})
|
||||
}
|
||||
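A usage sketch for the helper above, not from the commit; the function name and what the caller does with the remaining Stream are invented for illustration:

fn recognise_plugin_line(
  tail: Stream<'_>,
  keyword: Tok<String>,
) -> Option<ProjectResult<Location>> {
  // Accept `keyword ...` lines and reject `export keyword ...` for this line type.
  match custom_line(tail, keyword, false)? {
    Ok((_exported, _body, line_loc)) => Some(Ok(line_loc)),
    Err(e) => Some(Err(e)),
  }
}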
@@ -332,8 +332,8 @@ impl IntoSystem<'static> for SeqScheduler {
|
||||
prelude: Vec::new(),
|
||||
code: HashMap::new(),
|
||||
handlers,
|
||||
lexer_plugin: None,
|
||||
line_parser: None,
|
||||
lexer_plugins: vec![],
|
||||
line_parsers: vec![],
|
||||
constants: ConstTree::namespace(
|
||||
[i.i("system"), i.i("scheduler")],
|
||||
ConstTree::tree([
|
||||
|
||||
@@ -3,6 +3,7 @@ use std::fmt::Display;
|
||||
use crate::foreign::ExternError;
|
||||
|
||||
/// Various errors produced by arithmetic operations
|
||||
#[derive(Clone)]
|
||||
pub enum ArithmeticError {
|
||||
/// Integer overflow
|
||||
Overflow,
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import std::match
|
||||
|
||||
export ::(!=, ==)
|
||||
|
||||
export const not := \bool. if bool then false else true
|
||||
@@ -8,3 +10,37 @@ export macro ...$a or ...$b =0x4p36=> (ifthenelse (...$a) true (...$b))
|
||||
export macro if ...$cond then ...$true else ...$false:1 =0x1p84=> (
|
||||
ifthenelse (...$cond) (...$true) (...$false)
|
||||
)
|
||||
|
||||
(
|
||||
macro match::request (== ...$other)
|
||||
=0x1p230=> match::response (
|
||||
if match::value == (...$other)
|
||||
then match::pass
|
||||
else match::fail
|
||||
)
|
||||
( match::no_binds )
|
||||
)
|
||||
|
||||
(
|
||||
macro match::request (!= ...$other)
|
||||
=0x1p230=> match::response (
|
||||
if match::value != (...$other)
|
||||
then match::pass
|
||||
else match::fail
|
||||
)
|
||||
( match::no_binds )
|
||||
)
|
||||
|
||||
(
|
||||
macro match::request (true)
|
||||
=0x1p230=> match::response
|
||||
(if match::value then match::pass else match::fail)
|
||||
( match::no_binds )
|
||||
)
|
||||
|
||||
(
|
||||
macro match::request (false)
|
||||
=0x1p230=> match::response
|
||||
(if match::value then match::fail else match::pass)
|
||||
( match::no_binds )
|
||||
)
|
||||
|
||||
112
src/systems/stl/cross_pipeline.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::fmt::Debug;
|
||||
use std::iter;
|
||||
use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use crate::ast::{self, PType};
|
||||
use crate::ddispatch::Responder;
|
||||
use crate::foreign::{
|
||||
xfn_1ary, Atomic, AtomicReturn, ExFn, StrictEq, ToClause, XfnResult,
|
||||
};
|
||||
use crate::interpreted::{self, TryFromExprInst};
|
||||
use crate::utils::pure_seq::pushed;
|
||||
use crate::{interpreter, VName};
|
||||
|
||||
pub trait DeferredRuntimeCallback<T, U, R: ToClause>:
|
||||
Fn(Vec<(T, U)>) -> XfnResult<R> + Clone + Send + 'static
|
||||
{
|
||||
}
|
||||
impl<
|
||||
T,
|
||||
U,
|
||||
R: ToClause,
|
||||
F: Fn(Vec<(T, U)>) -> XfnResult<R> + Clone + Send + 'static,
|
||||
> DeferredRuntimeCallback<T, U, R> for F
|
||||
{
|
||||
}
|
||||
|
||||
fn table_receiver_rec<
|
||||
T: Clone + Send + 'static,
|
||||
U: TryFromExprInst + Clone + Send + 'static,
|
||||
R: ToClause + 'static,
|
||||
>(
|
||||
results: Vec<(T, U)>,
|
||||
mut remaining_keys: VecDeque<T>,
|
||||
callback: impl DeferredRuntimeCallback<T, U, R>,
|
||||
) -> XfnResult<interpreted::Clause> {
|
||||
match remaining_keys.pop_front() {
|
||||
None => callback(results).map(|v| v.to_clause()),
|
||||
Some(t) => Ok(interpreted::Clause::ExternFn(ExFn(Box::new(xfn_1ary(
|
||||
move |u: U| {
|
||||
table_receiver_rec(pushed(results, (t, u)), remaining_keys, callback)
|
||||
},
|
||||
))))),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct EphemeralAtom(
|
||||
Arc<dyn Fn() -> XfnResult<interpreted::Clause> + Sync + Send>,
|
||||
);
|
||||
impl Debug for EphemeralAtom {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str("EphemeralAtom")
|
||||
}
|
||||
}
|
||||
impl Responder for EphemeralAtom {
|
||||
fn respond(&self, _request: crate::ddispatch::Request) {}
|
||||
}
|
||||
impl StrictEq for EphemeralAtom {
|
||||
fn strict_eq(&self, _: &dyn std::any::Any) -> bool { false }
|
||||
}
|
||||
impl Atomic for EphemeralAtom {
|
||||
fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
|
||||
fn as_any_ref(&self) -> &dyn std::any::Any { self }
|
||||
fn run(
|
||||
self: Box<Self>,
|
||||
ctx: interpreter::Context,
|
||||
) -> crate::foreign::AtomicResult {
|
||||
Ok(AtomicReturn { clause: (self.0)()?, gas: ctx.gas, inert: false })
|
||||
}
|
||||
}
|
||||
|
||||
fn table_receiver<
|
||||
T: Clone + Send + 'static,
|
||||
U: TryFromExprInst + Clone + Send + 'static,
|
||||
R: ToClause + 'static,
|
||||
>(
|
||||
keys: VecDeque<T>,
|
||||
callback: impl DeferredRuntimeCallback<T, U, R>,
|
||||
) -> ast::Clause<VName> {
|
||||
if keys.is_empty() {
|
||||
let result =
|
||||
Arc::new(Mutex::new(callback(Vec::new()).map(|v| v.to_clause())));
|
||||
EphemeralAtom(Arc::new(move || result.lock().unwrap().deref().clone()))
|
||||
.ast_cls()
|
||||
} else {
|
||||
match table_receiver_rec(Vec::new(), keys, callback) {
|
||||
Ok(interpreted::Clause::ExternFn(xfn)) => ast::Clause::ExternFn(xfn),
|
||||
_ => unreachable!("details"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn defer_to_runtime<
|
||||
T: Clone + Send + 'static,
|
||||
U: TryFromExprInst + Clone + Send + 'static,
|
||||
R: ToClause + 'static,
|
||||
>(
|
||||
pairs: impl IntoIterator<Item = (T, Vec<ast::Expr<VName>>)>,
|
||||
callback: impl DeferredRuntimeCallback<T, U, R>,
|
||||
) -> ast::Clause<VName> {
|
||||
let (keys, ast_values) =
|
||||
pairs.into_iter().unzip::<_, _, VecDeque<_>, Vec<_>>();
|
||||
ast::Clause::s(
|
||||
'(',
|
||||
iter::once(table_receiver(keys, callback)).chain(
|
||||
ast_values.into_iter().map(|v| ast::Clause::S(PType::Par, Rc::new(v))),
|
||||
),
|
||||
)
|
||||
}
|
||||
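Not part of the diff: a rough picture of how a macro-time caller might use defer_to_runtime, assuming u64 has TryFromExprInst and ToClause impls in the stl; the labels and the summing callback are invented:

fn defer_sum(
  lhs: Vec<ast::Expr<VName>>,
  rhs: Vec<ast::Expr<VName>>,
) -> ast::Clause<VName> {
  // Each labelled subexpression is evaluated by the interpreter, then the
  // callback runs once with every result to build the final clause.
  defer_to_runtime(
    [("lhs", lhs), ("rhs", rhs)],
    |results: Vec<(&str, u64)>| Ok(results.into_iter().map(|(_, n)| n).sum::<u64>()),
  )
}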
@@ -1,4 +1,7 @@
|
||||
import super::known::*
|
||||
import super::match::*
|
||||
import super::macro
|
||||
import super::match::(match, =>)
|
||||
|
||||
--[ Do nothing. Especially useful as a passive cps operation ]--
|
||||
export const identity := \x.x
|
||||
@@ -21,6 +24,19 @@ export const return := \a. \b.a
|
||||
export macro ...$prefix $ ...$suffix:1 =0x1p38=> ...$prefix (...$suffix)
|
||||
export macro ...$prefix |> $fn ..$suffix:1 =0x2p32=> $fn (...$prefix) ..$suffix
|
||||
|
||||
export macro ($name) => ...$body =0x2p127=> (\$name. ...$body)
|
||||
export macro ($name, ...$argv) => ...$body =0x2p127=> (\$name. (...$argv) => ...$body)
|
||||
export macro $name => ...$body =0x1p127=> (\$name. ...$body)
|
||||
( macro (..$argv) => ...$body
|
||||
=0x2p127=> lambda_walker macro::comma_list (..$argv) (...$body)
|
||||
)
|
||||
( macro $_arg => ...$body
|
||||
=0x2p127=> \$_arg. ...$body)
|
||||
( macro lambda_walker ( macro::list_item ($_argname) $tail ) $body
|
||||
=0x2p254=> \$_argname. lambda_walker $tail $body
|
||||
)
|
||||
( macro lambda_walker ( macro::list_item (...$head) $tail ) $body
|
||||
=0x1p254=> \arg. match arg {
|
||||
...$head => lambda_walker $tail $body;
|
||||
}
|
||||
)
|
||||
( macro lambda_walker macro::list_end $body
|
||||
=0x1p254=> $body
|
||||
)
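-- Illustrative expansion: `(x, t[a, b]) => body` first becomes
-- `lambda_walker macro::comma_list (x, t[a, b]) (body)`, and then
-- `\x. \arg. match arg { t[a, b] => body; }` -- plain names bind directly,
-- compound patterns are routed through match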
|
||||
|
||||
@@ -1 +1 @@
|
||||
export ::[,]
|
||||
export ::[, _ ; . =]
|
||||
|
||||
@@ -1,18 +1,26 @@
|
||||
import super::option
|
||||
import super::(functional::*, procedural::*, loop::*, bool::*, known::*, number::*, tuple::*)
|
||||
import super::(option, match, macro)
|
||||
import super::(functional::*, procedural::*)
|
||||
import super::(loop::*, bool::*, known::*, number::*, tuple::*)
|
||||
|
||||
const pair := \a. \b. \f. f a b
|
||||
export type ty (
|
||||
import super::super::(option, tuple, panic)
|
||||
import super::super::(known::*, bool::*)
|
||||
|
||||
-- Constructors
|
||||
export const cons := \hd. \tl. wrap (option::some tuple::t[hd, unwrap tl])
|
||||
export const end := wrap option::none
|
||||
export const pop := \list. \default. \f. (
|
||||
option::handle (unwrap list)
|
||||
default
|
||||
\pair. tuple::apply pair
|
||||
\len. if len == 2
|
||||
then ( \hd. \tl. f hd (wrap tl) )
|
||||
else panic "list element must be 2-ple"
|
||||
)
|
||||
)
|
||||
|
||||
export const cons := \hd. \tl. option::some t[hd, tl]
|
||||
export const end := option::none
|
||||
|
||||
export const pop := \list. \default. \f. do{
|
||||
cps tuple = list default;
|
||||
cps head, tail = tuple;
|
||||
f head tail
|
||||
}
|
||||
export const cons := ty::cons
|
||||
export const end := ty::end
|
||||
export const pop := ty::pop
|
||||
|
||||
-- Operators
|
||||
|
||||
@@ -124,8 +132,34 @@ export const chain := \list. \cont. loop_over (list) {
|
||||
cps head;
|
||||
}
|
||||
|
||||
macro new[...$item, ...$rest:1] =0x2p84=> (cons (...$item) new[...$rest])
|
||||
macro new[...$end] =0x1p84=> (cons (...$end) end)
|
||||
macro new[] =0x1p84=> end
|
||||
macro new[..$items] =0x2p84=> mk_list macro::comma_list (..$items)
|
||||
|
||||
macro mk_list ( macro::list_item $item $tail ) =0x1p254=> (cons $item mk_list $tail)
|
||||
macro mk_list macro::list_end =0x1p254=> end
|
||||
|
||||
export ::(new)
|
||||
|
||||
( macro match::request (cons $head $tail)
|
||||
=0x1p230=> await_subpatterns
|
||||
(match::request ($head))
|
||||
(match::request ($tail))
|
||||
)
|
||||
( macro await_subpatterns
|
||||
(match::response $h_expr ( $h_binds ))
|
||||
(match::response $t_expr ( $t_binds ))
|
||||
=0x1p230=> match::response (
|
||||
pop
|
||||
match::value
|
||||
match::fail
|
||||
\head. \tail. (
|
||||
(\match::pass. (\match::value. $h_expr) head)
|
||||
(match::take_binds $h_binds (
|
||||
(\match::pass. (\match::value. $t_expr) tail)
|
||||
(match::take_binds $t_binds (
|
||||
match::give_binds match::chain_binds $h_binds $t_binds match::pass
|
||||
))
|
||||
))
|
||||
)
|
||||
)
|
||||
(match::chain_binds $h_binds $t_binds)
|
||||
)
|
||||
|
||||
@@ -37,6 +37,7 @@ macro parse_binds (...$item) =0x1p250=> (
|
||||
()
|
||||
)
|
||||
|
||||
|
||||
-- while loop
|
||||
export macro statement (
|
||||
while ..$condition (..$binds) {
|
||||
@@ -64,7 +65,7 @@ macro init_binds ( ($name $value) $tail ) =0x1p250=> $value init_binds $tail
|
||||
macro $fn init_binds () =0x1p250=> $fn
|
||||
|
||||
-- apply_binds passes the name for initializers
|
||||
macro apply_binds ( ($name $_value) $tail ) =0x1p250=> $name apply_binds $tail
|
||||
macro apply_binds ( ($name $value) $tail ) =0x1p250=> $name apply_binds $tail
|
||||
macro $fn apply_binds () =0x1p250=> $fn
|
||||
|
||||
--[
|
||||
|
||||
68
src/systems/stl/macro.orc
Normal file
68
src/systems/stl/macro.orc
Normal file
@@ -0,0 +1,68 @@
|
||||
import std::number::add
|
||||
import std::known::*
|
||||
|
||||
-- convert a comma-separated list into a linked list, with support for trailing commas
|
||||
export ::comma_list
|
||||
( macro comma_list ( ...$head, ...$tail:1 )
|
||||
=0x2p254=> ( await_comma_list ( ...$head ) comma_list ( ...$tail ) )
|
||||
)
|
||||
( macro comma_list (...$only)
|
||||
=0x1p254=> ( list_item (...$only) list_end )
|
||||
)
|
||||
( macro ( await_comma_list $head $tail )
|
||||
=0x2p254=> ( list_item $head $tail )
|
||||
)
|
||||
( macro comma_list ()
|
||||
=0x1p254=> list_end
|
||||
)
|
||||
( macro comma_list (...$data,)
|
||||
=0x3p254=> comma_list (...$data)
|
||||
)
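-- e.g. `comma_list (a, b, c)` expands to
-- `( list_item (a) ( list_item (b) ( list_item (c) list_end ) ) )`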
|
||||
|
||||
-- convert a semicolon-separated list into a linked list, with support for trailing semicolons
|
||||
export ::semi_list
|
||||
( macro semi_list ( ...$head; ...$tail:1 )
|
||||
=0x2p254=> ( await_semi_list ( ...$head ) semi_list ( ...$tail ) )
|
||||
)
|
||||
( macro semi_list (...$only)
|
||||
=0x1p254=> ( list_item (...$only) list_end )
|
||||
)
|
||||
( macro ( await_semi_list $head $tail )
|
||||
=0x2p254=> ( list_item $head $tail )
|
||||
)
|
||||
( macro semi_list ()
|
||||
=0x1p254=> list_end
|
||||
)
|
||||
( macro semi_list (...$data;)
|
||||
=0x3p254=> semi_list (...$data)
|
||||
)
|
||||
|
||||
-- calculate the length of a linked list
|
||||
export ::length
|
||||
( macro length ( list_item $discard $tail )
|
||||
=0x1p254=> await_length ( length $tail )
|
||||
)
|
||||
( macro await_length ( $len )
|
||||
=0x1p254=> (add 1 $len)
|
||||
)
|
||||
macro length list_end =0x1p254=> (0)
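-- e.g. `length ( list_item (a) ( list_item (b) list_end ) )` expands to `(add 1 (add 1 (0)))`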
|
||||
|
||||
|
||||
export ::error
|
||||
( macro ( ..$prefix error $details ..$suffix )
|
||||
=0x2p255=> error $details
|
||||
)
|
||||
( macro [ ..$prefix error $details ..$suffix ]
|
||||
=0x2p255=> error $details
|
||||
)
|
||||
( macro { ..$prefix error $details ..$suffix }
|
||||
=0x2p255=> error $details
|
||||
)
|
||||
( macro error $details
|
||||
=0x1p255=>
|
||||
)
|
||||
|
||||
export ::leftover_error
|
||||
( macro leftover_error $details
|
||||
=0x1p255=> error ( "Token fails to parse" $details )
|
||||
)
|
||||
@@ -1,73 +1,96 @@
|
||||
import super::(bool::*, functional::*, known::*, list, option, loop::*, procedural::*)
|
||||
import std::panic
|
||||
import super::(bool::*, functional::*, known::*, loop::*, procedural::*)
|
||||
import super::(panic, match, macro, option, list)
|
||||
|
||||
-- utilities for using lists as pairs
|
||||
export type ty (
|
||||
import super::super::(panic, macro, list, tuple, option)
|
||||
import super::super::(bool::*, functional::*, known::*, loop::*, procedural::*)
|
||||
|
||||
const fst := \l. (
|
||||
list::get l 0
|
||||
(panic "nonempty expected")
|
||||
\x.x
|
||||
)
|
||||
const snd := \l. (
|
||||
list::get l 1
|
||||
(panic "2 elements expected")
|
||||
\x.x
|
||||
)
|
||||
--[ Constructors ]--
|
||||
|
||||
-- constructors
|
||||
|
||||
export const empty := list::end
|
||||
export const add := \m. \k. \v. (
|
||||
const empty := wrap list::end
|
||||
const add := \m. \k. \v. wrap (
|
||||
list::cons
|
||||
list::new[k, v]
|
||||
m
|
||||
)
|
||||
tuple::t[k, v]
|
||||
(unwrap m)
|
||||
)
|
||||
|
||||
-- queries
|
||||
--[ List constructor ]--
|
||||
|
||||
-- return the last occurrence of a key if it exists
|
||||
export const get := \m. \key. (
|
||||
loop_over (m) {
|
||||
export ::new
|
||||
macro new[..$items] =0x2p84=> mk_map macro::comma_list (..$items)
|
||||
|
||||
macro mk_map macro::list_end =0x1p254=> empty
|
||||
( macro mk_map ( macro::list_item ( ...$key = ...$value:1 ) $tail )
|
||||
=0x1p254=> ( set mk_map $tail (...$key) (...$value) )
|
||||
)
|
||||
|
||||
--[ Queries ]--
|
||||
|
||||
-- return the last occurrence of a key if it exists
|
||||
export const get := \m. \key. (
|
||||
loop_over (m=unwrap m) {
|
||||
cps record, m = list::pop m option::none;
|
||||
cps if fst record == key
|
||||
then return $ option::some $ snd record
|
||||
cps if tuple::pick record 0 == key
|
||||
then return $ option::some $ tuple::pick record 1
|
||||
else identity;
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
-- commands
|
||||
--[ Commands ]--
|
||||
|
||||
-- remove one occurrence of a key
|
||||
export const del := \m. \k. (
|
||||
recursive r (m)
|
||||
-- remove one occurrence of a key
|
||||
export const del := \m. \k. wrap (
|
||||
recursive r (m=unwrap m)
|
||||
list::pop m list::end \head. \tail.
|
||||
if fst head == k then tail
|
||||
if tuple::pick head 0 == k then tail
|
||||
else list::cons head $ r tail
|
||||
)
|
||||
|
||||
-- replace at most one occurrence of a key
|
||||
export const set := \m. \k. \v. m |> del k |> add k v
|
||||
)
|
||||
|
||||
-- remove all occurrences of a key
|
||||
export const delall := \m. \k. (
|
||||
list::filter m \record. fst record != k
|
||||
)
|
||||
macro new =0x1p200=> ty::new
|
||||
|
||||
-- replace at most one occurrence of a key
|
||||
export const set := \m. \k. \v. (
|
||||
m
|
||||
|> del k
|
||||
|> add k v
|
||||
)
|
||||
export const empty := ty::empty
|
||||
export const add := ty::add
|
||||
export const get := ty::get
|
||||
export const set := ty::set
|
||||
export const del := ty::del
|
||||
|
||||
-- ensure that there's only one instance of each key in the map
|
||||
export const normalize := \m. (
|
||||
recursive r (m, normal=empty)
|
||||
list::pop m normal \head. \tail.
|
||||
r tail $ set normal (fst head) (snd head)
|
||||
export ::having
|
||||
( macro match::request (having [..$items])
|
||||
=0x1p230=> having_pattern (
|
||||
pattern_walker
|
||||
macro::comma_list ( ..$items )
|
||||
)
|
||||
)
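-- Illustrative use (key and binding names are examples only):
-- `match m { having ["k" = v] => v }` succeeds when `get m "k"` yields `some`,
-- binding the contained value through the sub-pattern `v`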
|
||||
|
||||
macro new[...$tail:2, ...$key = ...$value:1] =0x2p84=> (
|
||||
set new[...$tail] (...$key) (...$value)
|
||||
( macro having_pattern ( tail_result $expr ( $binds ) )
|
||||
=0x1p254=> match::response $expr ( $binds )
|
||||
)
|
||||
( macro pattern_walker macro::list_end
|
||||
=0x1p254=> tail_result match::pass ( match::no_binds )
|
||||
)
|
||||
( macro pattern_walker ( macro::list_item ( ...$key = ...$value:1 ) $tail )
|
||||
=0x1p254=> await_pattern ( ...$key )
|
||||
( match::request (...$value) )
|
||||
( pattern_walker $tail )
|
||||
)
|
||||
( macro await_pattern $key
|
||||
( match::response $expr ( $binds ) )
|
||||
( tail_result $t_expr ( $t_binds ) )
|
||||
=0x1p254=> tail_result (
|
||||
option::handle (get match::value $key)
|
||||
match::fail
|
||||
\value. (\match::pass. (\match::value. $expr) value) (
|
||||
match::take_binds $binds (
|
||||
(\match::pass. $t_expr) (
|
||||
match::take_binds $t_binds (
|
||||
match::give_binds match::chain_binds $binds $t_binds match::pass
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
( match::chain_binds $binds $t_binds )
|
||||
)
|
||||
macro new[...$key = ...$value:1] =0x1p84=> (add empty (...$key) (...$value))
|
||||
macro new[] =0x1p84=> empty
|
||||
|
||||
export ::(new)
|
||||
|
||||
104
src/systems/stl/match.orc
Normal file
104
src/systems/stl/match.orc
Normal file
@@ -0,0 +1,104 @@
|
||||
import std::known::(_, ;)
|
||||
import std::procedural
|
||||
import std::bool
|
||||
import std::macro
|
||||
import std::panic
|
||||
|
||||
--[
|
||||
The protocol:
|
||||
|
||||
Request contains the pattern
|
||||
Response contains an expression and the list of names
|
||||
]--
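--[
  A minimal worked exchange with the rules below: the primitive pattern `_`
  emits `request ( _ )` and is answered with `response pass ( no_binds )`;
  a name pattern `foo` is answered with
  `response ( pass value ) ( add_bind foo no_binds )`, ie. the expression to
  run against `value` plus the list of names it binds.
]--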
|
||||
|
||||
(
|
||||
macro ..$prefix:1 match ...$argument:0 { ..$body } ..$suffix:1
|
||||
=0x1p130=> ..$prefix (
|
||||
(\value. match_walker macro::semi_list ( ..$body ) )
|
||||
( ...$argument )
|
||||
) ..$suffix
|
||||
)
|
||||
|
||||
macro match_walker macro::list_end =0x1p254=> panic "no arms match"
|
||||
( macro match_walker ( macro::list_item (...$pattern => ...$handler:1) $tail )
|
||||
=0x1p254=> match_await ( request (...$pattern) ) (...$handler) ( match_walker $tail )
|
||||
)
|
||||
( macro match_await ( response $expr ( $binds ) ) $handler $tail
|
||||
=0x1p254=> (\fail. (\pass. $expr) (take_binds $binds $handler)) $tail
|
||||
)
|
||||
|
||||
macro request (( ..$pattern )) =0x1p254=> request ( ..$pattern )
|
||||
|
||||
-- bindings list
|
||||
|
||||
export ::(no_binds, add_bind, chain_binds, give_binds, take_binds)
|
||||
|
||||
macro add_bind $_new no_binds =0x1p254=> ( binds_list $_new no_binds )
|
||||
( macro add_bind $_new ( binds_list ...$tail )
|
||||
=0x1p254=> ( binds_list $_new ( binds_list ...$tail ) )
|
||||
)
|
||||
macro give_binds no_binds $cont =0x1p254=> $cont
|
||||
( macro give_binds ( binds_list $_name $tail ) $cont
|
||||
=0x1p254=> (give_binds $tail $cont $_name)
|
||||
)
|
||||
macro take_binds no_binds $cont =0x1p254=> $cont
|
||||
( macro take_binds ( binds_list $_name $tail ) $cont
|
||||
=0x1p254=> \$_name. take_binds $tail $cont
|
||||
)
|
||||
macro chain_binds no_binds $second =0x1p254=> $second
|
||||
( macro chain_binds ( binds_list $_head $tail ) $second
|
||||
=0x1p254=> add_bind $_head chain_binds $tail $second
|
||||
)
|
||||
|
||||
--[ primitive pattern ( _ ) ]--
|
||||
|
||||
(
|
||||
macro request ( _ )
|
||||
=0x1p230=> response pass ( no_binds )
|
||||
)
|
||||
|
||||
--[ primitive name pattern ]--
|
||||
|
||||
(
|
||||
macro request ( $_name )
|
||||
=0x1p226=> response ( pass value ) ( add_bind $_name no_binds )
|
||||
)
|
||||
|
||||
--[ primitive pattern ( and ) ]--
|
||||
|
||||
( macro request ( ...$lhs bool::and ...$rhs )
|
||||
=0x3p230=> await_and_subpatterns ( request (...$lhs ) ) ( request ( ...$rhs ) )
|
||||
)
|
||||
|
||||
( macro await_and_subpatterns ( response $lh_expr ( $lh_binds ) ) ( response $rh_expr ( $rh_binds ) )
|
||||
=0x1p254=> response (
|
||||
(\pass. $lh_expr) (take_binds $lh_binds (
|
||||
(\pass. $rh_expr) (take_binds $rh_binds (
|
||||
give_binds chain_binds $lh_binds $rh_binds pass
|
||||
))
|
||||
))
|
||||
)
|
||||
( chain_binds $lh_binds $rh_binds )
|
||||
)
|
||||
|
||||
--[ primitive pattern ( or ) ]--
|
||||
|
||||
(
|
||||
macro request ( ...$lhs bool::or ...$rhs )
|
||||
=0x3p230=> await_or_subpatterns
|
||||
( request ( ...$lhs ) )
|
||||
( request ( ...$rhs ) )
|
||||
)
|
||||
|
||||
( -- for this to work, lh and rh must produce the same bindings
|
||||
macro await_or_subpatterns ( response $lh_expr ( $lh_binds) ) ( response $rh_expr ( $rh_binds ) )
|
||||
=0x1p254=> response (
|
||||
(\cancel. $lh_expr) -- lh works with pass directly because its bindings are reported up
|
||||
($rh_expr (take_binds $rh_binds -- rh runs if lh cancels
|
||||
(give_binds $lh_binds pass) -- translate rh binds to lh binds
|
||||
))
|
||||
)
|
||||
( $lh_binds ) -- report lh bindings
|
||||
)
|
||||
|
||||
export ::(match, cancel, argument, request, response, =>)
|
||||
@@ -4,10 +4,13 @@ mod arithmetic_error;
|
||||
mod binary;
|
||||
mod bool;
|
||||
mod conv;
|
||||
mod cross_pipeline;
|
||||
mod exit_status;
|
||||
mod inspect;
|
||||
mod number;
|
||||
mod panic;
|
||||
mod protocol;
|
||||
mod reflect;
|
||||
mod state;
|
||||
mod stl_system;
|
||||
mod string;
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use ordered_float::NotNan;
|
||||
|
||||
use super::ArithmeticError;
|
||||
@@ -39,7 +37,7 @@ impl Numeric {
|
||||
}
|
||||
|
||||
/// Wrap an f64 in a Numeric
|
||||
pub fn new(value: f64) -> Result<Self, Rc<dyn ExternError>> {
|
||||
pub fn new(value: f64) -> XfnResult<Self> {
|
||||
if value.is_finite() {
|
||||
NotNan::new(value)
|
||||
.map(Self::Float)
|
||||
@@ -50,7 +48,7 @@ impl Numeric {
|
||||
}
|
||||
}
|
||||
impl TryFromExprInst for Numeric {
|
||||
fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
|
||||
fn from_exi(exi: ExprInst) -> XfnResult<Self> {
|
||||
(exi.request())
|
||||
.ok_or_else(|| AssertionError::ext(Location::Unknown, "a numeric value"))
|
||||
}
|
||||
|
||||
@@ -1,9 +1,40 @@
|
||||
import std::panic
|
||||
import std::(panic, match)
|
||||
|
||||
export const some := \v. \d. \f. f v
|
||||
export const none := \d. \f. d
|
||||
export type ty (
|
||||
export const some := \v. wrap \d. \f. f v
|
||||
export const none := wrap \d. \f. d
|
||||
|
||||
export const map := \option. \f. option none f
|
||||
export const flatten := \option. option none \opt. opt
|
||||
export const flatmap := \option. \f. option none \opt. map opt f
|
||||
export const unwrap := \option. option (panic "value expected") \x.x
|
||||
export const handle := \t. \d. \f. (unwrap t) d f
|
||||
)
|
||||
|
||||
export const some := ty::some
|
||||
export const none := ty::none
|
||||
export const handle := ty::handle
|
||||
|
||||
export const map := \option. \f. handle option none f
|
||||
export const flatten := \option. handle option none \opt. opt
|
||||
export const flatmap := \option. \f. handle option none \opt. map opt f
|
||||
export const unwrap := \option. handle option (panic "value expected") \x.x
|
||||
|
||||
(
|
||||
macro match::request ( none )
|
||||
=0x1p230=> match::response (
|
||||
handle match::value
|
||||
match::pass
|
||||
\_. match::fail
|
||||
) ( match::no_binds )
|
||||
)
|
||||
|
||||
(
|
||||
macro match::request ( some ...$value )
|
||||
=0x1p230=> await_some_subpattern ( match::request (...$value) )
|
||||
)
|
||||
|
||||
(
|
||||
macro await_some_subpattern ( match::response $expr ( $binds ) )
|
||||
=0x1p254=> match::response (
|
||||
handle match::value
|
||||
match::fail
|
||||
\match::value. $expr
|
||||
) ( $binds )
|
||||
)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use std::fmt::Display;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::foreign::{xfn_1ary, ExternError, XfnResult};
|
||||
use crate::interpreted::Clause;
|
||||
@@ -7,7 +7,8 @@ use crate::{ConstTree, Interner, OrcString};
|
||||
|
||||
/// An unrecoverable error in Orchid land. Because Orchid is lazy, this only
|
||||
/// invalidates expressions that reference the one that generated it.
|
||||
pub struct OrchidPanic(Rc<String>);
|
||||
#[derive(Clone)]
|
||||
pub struct OrchidPanic(Arc<String>);
|
||||
|
||||
impl Display for OrchidPanic {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
@@ -20,7 +21,7 @@ impl ExternError for OrchidPanic {}
|
||||
/// Takes a message, returns an [ExternError] unconditionally.
|
||||
pub fn orc_panic(msg: OrcString) -> XfnResult<Clause> {
|
||||
// any return value would work, but Clause is the simplest
|
||||
Err(OrchidPanic(Rc::new(msg.get_string())).into_extern())
|
||||
Err(OrchidPanic(Arc::new(msg.get_string())).into_extern())
|
||||
}
|
||||
|
||||
pub fn panic(i: &Interner) -> ConstTree {
|
||||
|
||||
@@ -5,11 +5,13 @@ export ::[++]
|
||||
import std::bool::*
|
||||
export ::([== !=], if, then, else, true, false, and, or, not)
|
||||
import std::functional::*
|
||||
export ::([$ |> =>], identity, pass, pass2, return)
|
||||
export ::([$ |>], identity, pass, pass2, return)
|
||||
import std::procedural::*
|
||||
export ::(do, let, cps, [; =])
|
||||
import std::tuple::*
|
||||
export ::(do, let, cps, [;])
|
||||
import std::tuple::t
|
||||
export ::(t)
|
||||
import std::match::(match, [=>])
|
||||
export ::(match, [=>])
|
||||
import std::tuple
|
||||
import std::list
|
||||
import std::map
|
||||
@@ -19,4 +21,4 @@ import std::loop::*
|
||||
export ::(loop_over, recursive, while)
|
||||
|
||||
import std::known::*
|
||||
export ::[,]
|
||||
export ::[, _ ; . =]
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import super::functional::=>
|
||||
import super::match::=>
|
||||
import super::known::*
|
||||
|
||||
-- remove duplicate ;-s
|
||||
export macro do {
|
||||
@@ -14,8 +15,11 @@ export macro do { ...$return } =0x1p130=> (...$return)
|
||||
-- modular operation block that returns a CPS function
|
||||
export macro do cps { ...$body } =0x1p130=> \cont. do { ...$body ; cont }
|
||||
|
||||
export macro statement (let $name = ...$value) (...$next) =0x1p230=> (
|
||||
( \$name. ...$next) (...$value)
|
||||
export macro statement (let $_name = ...$value) (...$next) =0x2p230=> (
|
||||
( \$_name. ...$next) (...$value)
|
||||
)
|
||||
export macro statement (let ...$pattern = ...$value:1) (...$next) =0x1p230=> (
|
||||
( (...$pattern) => (...$next) ) (...$value)
|
||||
)
|
||||
export macro statement (cps ...$names = ...$operation:1) (...$next) =0x2p230=> (
|
||||
(...$operation) ( (...$names) => ...$next )
|
||||
|
||||
283
src/systems/stl/protocol.rs
Normal file
283
src/systems/stl/protocol.rs
Normal file
@@ -0,0 +1,283 @@
|
||||
use std::fmt::Debug;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
|
||||
use super::cross_pipeline::defer_to_runtime;
|
||||
use super::reflect::RefEqual;
|
||||
use crate::ast::{self, Constant, Expr, PType};
|
||||
use crate::error::{ProjectResult, RuntimeError};
|
||||
use crate::foreign::{xfn_2ary, Atomic, InertAtomic, XfnResult};
|
||||
use crate::interpreted::ExprInst;
|
||||
use crate::parse::errors::{Expected, ExpectedBlock, ExpectedName};
|
||||
use crate::parse::{
|
||||
parse_entries, parse_exprv, parse_line, parse_nsname, split_lines,
|
||||
vec_to_single, Context, Lexeme, LineParser, LineParserOut, Stream,
|
||||
};
|
||||
use crate::sourcefile::{
|
||||
FileEntry, FileEntryKind, Member, MemberKind, ModuleBlock,
|
||||
};
|
||||
use crate::systems::parse_custom_line::custom_line;
|
||||
use crate::utils::pure_seq::pushed;
|
||||
use crate::{ConstTree, Interner, Location, Tok, VName};
|
||||
|
||||
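/// Data shared by a protocol and a type tag: a unique identity, a display
/// name, and the table of implementations keyed by the identity of the
/// counterpart (protocol id for tags, type id for protocols).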
pub struct TypeData {
|
||||
pub id: RefEqual,
|
||||
pub display_name: Tok<String>,
|
||||
pub impls: HashMap<RefEqual, ExprInst>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Protocol(pub Arc<TypeData>);
|
||||
impl Debug for Protocol {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple(&self.0.display_name).field(&self.0.id.id()).finish()
|
||||
}
|
||||
}
|
||||
impl InertAtomic for Protocol {
|
||||
fn type_str() -> &'static str { "Protocol" }
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Tag(pub Arc<TypeData>);
|
||||
impl Debug for Tag {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple(&self.0.display_name).field(&self.0.id.id()).finish()
|
||||
}
|
||||
}
|
||||
impl InertAtomic for Tag {
|
||||
fn type_str() -> &'static str { "Tag" }
|
||||
fn strict_eq(&self, other: &Self) -> bool { self.0.id == other.0.id }
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Tagged {
|
||||
pub tag: Tag,
|
||||
pub value: ExprInst,
|
||||
}
|
||||
impl Debug for Tagged {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("Tagged").field(&self.tag).field(&self.value).finish()
|
||||
}
|
||||
}
|
||||
impl InertAtomic for Tagged {
|
||||
fn type_str() -> &'static str { "Tagged" }
|
||||
}
|
||||
|
||||
fn parse_impl(
|
||||
tail: Stream,
|
||||
ctx: &(impl Context + ?Sized),
|
||||
) -> Option<ProjectResult<(VName, Expr<VName>)>> {
|
||||
custom_line(tail, ctx.interner().i("impl"), false).map(|res| {
|
||||
let (_, tail, _) = res?;
|
||||
let (name, tail) = parse_nsname(tail, ctx)?;
|
||||
let (walrus, tail) = tail.trim().pop()?;
|
||||
Expected::expect(Lexeme::Walrus, walrus)?;
|
||||
let (body, empty) = parse_exprv(tail, None, ctx)?;
|
||||
empty.expect_empty()?;
|
||||
let value = vec_to_single(tail.fallback, body)?;
|
||||
Ok((name, value))
|
||||
})
|
||||
}
|
||||
|
||||
struct Impl {
|
||||
target: VName,
|
||||
value: Expr<VName>,
|
||||
}
|
||||
|
||||
fn extract_impls(
|
||||
tail: Stream,
|
||||
ctx: &(impl Context + ?Sized),
|
||||
location: Location,
|
||||
typeid_name: Tok<String>,
|
||||
) -> ProjectResult<(Vec<FileEntry>, Vec<Impl>)> {
|
||||
let mut lines = Vec::new();
|
||||
let mut impls = Vec::new(); // name1, value1, name2, value2, etc...
|
||||
for line in split_lines(tail) {
|
||||
match parse_impl(line, ctx) {
|
||||
Some(result) => {
|
||||
let (name, value) = result?;
|
||||
impls.push(Impl { target: pushed(name, typeid_name.clone()), value });
|
||||
},
|
||||
None => lines.extend(
|
||||
parse_line(line, ctx)?.into_iter().map(|k| k.wrap(location.clone())),
|
||||
),
|
||||
}
|
||||
}
|
||||
Ok((lines, impls))
|
||||
}
|
||||
|
||||
pub fn protocol_parser<'a>(
|
||||
tail: Stream<'_>,
|
||||
ctx: &'a (impl Context + ?Sized + 'a),
|
||||
) -> LineParserOut {
|
||||
let i = ctx.interner();
|
||||
custom_line(tail, i.i("protocol"), true).map(|res| {
|
||||
let (exported, tail, line_loc) = res?;
|
||||
let (name, tail) = tail.pop()?;
|
||||
let name = ExpectedName::expect(name)?;
|
||||
let tail = ExpectedBlock::expect(tail, PType::Par)?;
|
||||
let protoid = RefEqual::new();
|
||||
let (lines, impls) =
|
||||
extract_impls(tail, ctx, line_loc.clone(), i.i("__type_id__"))?;
|
||||
let prelude = "
|
||||
import std::protocol
|
||||
const resolve := protocol::resolve __protocol__
|
||||
const get_impl := protocol::get_impl __protocol__
|
||||
";
|
||||
let body = parse_entries(ctx, prelude, line_loc.clone())?
|
||||
.into_iter()
|
||||
.chain(
|
||||
[
|
||||
("__protocol_id__", protoid.clone().ast_cls()),
|
||||
(
|
||||
"__protocol__",
|
||||
defer_to_runtime(
|
||||
impls.into_iter().flat_map(|Impl { target, value }| {
|
||||
[ast::Clause::Name(target).into_expr(), value]
|
||||
.map(|e| ((), vec![e]))
|
||||
}),
|
||||
{
|
||||
let name = name.clone();
|
||||
move |pairs: Vec<((), ExprInst)>| {
|
||||
let mut impls = HashMap::new();
|
||||
debug_assert!(
|
||||
pairs.len() % 2 == 0,
|
||||
"names and values pair up"
|
||||
);
|
||||
let mut nvnvnv = pairs.into_iter().map(|t| t.1);
|
||||
while let Some((name, value)) = nvnvnv.next_tuple() {
|
||||
let key = name.downcast::<RefEqual>()?;
|
||||
impls.insert(key, value);
|
||||
}
|
||||
let id = protoid.clone();
|
||||
let display_name = name.clone();
|
||||
Ok(Protocol(Arc::new(TypeData { id, display_name, impls })))
|
||||
}
|
||||
},
|
||||
),
|
||||
),
|
||||
]
|
||||
.map(|(n, value)| {
|
||||
let value = Expr { value, location: line_loc.clone() };
|
||||
MemberKind::Constant(Constant { name: i.i(n), value })
|
||||
.to_entry(true, line_loc.clone())
|
||||
}),
|
||||
)
|
||||
.chain(lines)
|
||||
.collect();
|
||||
let kind = MemberKind::Module(ModuleBlock { name, body });
|
||||
Ok(vec![FileEntryKind::Member(Member { exported, kind })])
|
||||
})
|
||||
}
|
||||
|
||||
pub fn type_parser(
|
||||
tail: Stream,
|
||||
ctx: &(impl Context + ?Sized),
|
||||
) -> LineParserOut {
|
||||
let i = ctx.interner();
|
||||
custom_line(tail, ctx.interner().i("type"), true).map(|res| {
|
||||
let (exported, tail, line_loc) = res?;
|
||||
let (name, tail) = tail.pop()?;
|
||||
let name = ExpectedName::expect(name)?;
|
||||
let tail = ExpectedBlock::expect(tail, PType::Par)?;
|
||||
let typeid = RefEqual::new();
|
||||
let (lines, impls) =
|
||||
extract_impls(tail, ctx, line_loc.clone(), i.i("__protocol_id__"))?;
|
||||
let prelude = "
|
||||
import std::protocol
|
||||
const unwrap := protocol::unwrap __type_tag__
|
||||
const wrap := protocol::wrap __type_tag__
|
||||
";
|
||||
let body = parse_entries(ctx, prelude, line_loc.clone())?
|
||||
.into_iter()
|
||||
.chain(
|
||||
[
|
||||
("__type_id__", typeid.clone().ast_cls()),
|
||||
(
|
||||
"__type_tag__",
|
||||
defer_to_runtime(
|
||||
impls.into_iter().flat_map(|Impl { target, value }| {
|
||||
[ast::Clause::Name(target).into_expr(), value]
|
||||
.map(|e| ((), vec![e]))
|
||||
}),
|
||||
{
|
||||
let name = name.clone();
|
||||
move |pairs: Vec<((), ExprInst)>| {
|
||||
let mut impls = HashMap::new();
|
||||
debug_assert!(
|
||||
pairs.len() % 2 == 0,
|
||||
"names and values pair up"
|
||||
);
|
||||
let mut nvnvnv = pairs.into_iter().map(|t| t.1);
|
||||
while let Some((name, value)) = nvnvnv.next_tuple() {
|
||||
let key = name.downcast::<RefEqual>()?;
|
||||
impls.insert(key, value);
|
||||
}
|
||||
let id = typeid.clone();
|
||||
let display_name = name.clone();
|
||||
Ok(Tag(Arc::new(TypeData { id, display_name, impls })))
|
||||
}
|
||||
},
|
||||
),
|
||||
),
|
||||
]
|
||||
.map(|(n, value)| {
|
||||
let value = Expr { value, location: line_loc.clone() };
|
||||
MemberKind::Constant(Constant { name: i.i(n), value })
|
||||
.to_entry(true, line_loc.clone())
|
||||
}),
|
||||
)
|
||||
.chain(lines)
|
||||
.collect();
|
||||
let kind = MemberKind::Module(ModuleBlock { name, body });
|
||||
Ok(vec![FileEntryKind::Member(Member { exported, kind })])
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parsers() -> Vec<Box<dyn LineParser>> {
|
||||
vec![
|
||||
Box::new(|tail, ctx| protocol_parser(tail, ctx)),
|
||||
Box::new(|tail, ctx| type_parser(tail, ctx)),
|
||||
]
|
||||
}
|
||||
|
||||
pub fn unwrap(tag: Tag, tagged: Tagged) -> XfnResult<ExprInst> {
|
||||
if tagged.tag.strict_eq(&tag) {
|
||||
return Ok(tagged.value);
|
||||
}
|
||||
let msg = format!("{:?} is not {:?}", tagged.tag, tag);
|
||||
RuntimeError::fail(msg, "unwrapping type-tagged value")
|
||||
}
|
||||
|
||||
pub fn wrap(tag: Tag, value: ExprInst) -> XfnResult<Tagged> {
|
||||
Ok(Tagged { tag, value })
|
||||
}
|
||||
|
||||
pub fn resolve(protocol: Protocol, tagged: Tagged) -> XfnResult<ExprInst> {
|
||||
get_impl(protocol, tagged.tag)
|
||||
}
|
||||
|
||||
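/// Find the implementation of a protocol for a tag; either side may carry the
/// impl, so the protocol's table is consulted first and the tag's second.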
pub fn get_impl(proto: Protocol, tag: Tag) -> XfnResult<ExprInst> {
|
||||
if let Some(implem) = proto.0.impls.get(&tag.0.id) {
|
||||
return Ok(implem.clone());
|
||||
}
|
||||
if let Some(implem) = tag.0.impls.get(&proto.0.id) {
|
||||
return Ok(implem.clone());
|
||||
}
|
||||
let message = format!("{:?} doesn't implement {:?}", tag, proto);
|
||||
RuntimeError::fail(message, "dispatching protocol")
|
||||
}
|
||||
|
||||
pub fn protocol_lib(i: &Interner) -> ConstTree {
|
||||
ConstTree::namespace(
|
||||
[i.i("protocol")],
|
||||
ConstTree::tree([
|
||||
(i.i("unwrap"), ConstTree::xfn(xfn_2ary(unwrap))),
|
||||
(i.i("wrap"), ConstTree::xfn(xfn_2ary(wrap))),
|
||||
(i.i("get_impl"), ConstTree::xfn(xfn_2ary(get_impl))),
|
||||
(i.i("resolve"), ConstTree::xfn(xfn_2ary(resolve))),
|
||||
]),
|
||||
)
|
||||
}
|
||||
69
src/systems/stl/reflect.rs
Normal file
69
src/systems/stl/reflect.rs
Normal file
@@ -0,0 +1,69 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt::Debug;
|
||||
use std::hash::Hash;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::foreign::{xfn_2ary, InertAtomic};
|
||||
use crate::{ConstTree, Interner, Sym};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SymbolName(pub Sym);
|
||||
impl InertAtomic for SymbolName {
|
||||
fn type_str() -> &'static str { "SymbolName" }
|
||||
}
|
||||
|
||||
// #[derive(Debug, Clone)]
|
||||
// pub struct GetSymName;
|
||||
// impl ExternFn for GetSymName {
|
||||
// fn name(&self) -> &str { "GetSymName" }
|
||||
// fn apply(
|
||||
// self: Box<Self>,
|
||||
// arg: ExprInst,
|
||||
// _: Context,
|
||||
// ) -> XfnResult<Clause> { arg.inspect(|c| match c { Clause::Constant(name)
|
||||
// => Ok(SymbolName(name.clone()).atom_cls()), _ =>
|
||||
// AssertionError::fail(arg.location(), "is not a constant name"), })
|
||||
// }
|
||||
// }
|
||||
|
||||
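/// Identity token compared by the address of its shared allocation; every
/// call to `new` yields a value equal only to its own clones.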
#[derive(Clone)]
|
||||
pub struct RefEqual(Arc<u8>);
|
||||
impl RefEqual {
|
||||
pub fn new() -> Self { Self(Arc::new(0u8)) }
|
||||
pub fn id(&self) -> usize { &*self.0 as *const u8 as usize }
|
||||
}
|
||||
impl Debug for RefEqual {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("RefEqual").field(&self.id()).finish()
|
||||
}
|
||||
}
|
||||
impl InertAtomic for RefEqual {
|
||||
fn type_str() -> &'static str { "RefEqual" }
|
||||
fn strict_eq(&self, other: &Self) -> bool { self == other }
|
||||
}
|
||||
impl Eq for RefEqual {}
|
||||
impl PartialEq for RefEqual {
|
||||
fn eq(&self, other: &Self) -> bool { self.id() == other.id() }
|
||||
}
|
||||
impl Ord for RefEqual {
|
||||
fn cmp(&self, other: &Self) -> Ordering { self.id().cmp(&other.id()) }
|
||||
}
|
||||
impl PartialOrd for RefEqual {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
impl Hash for RefEqual {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.id().hash(state) }
|
||||
}
|
||||
|
||||
pub fn reflect(i: &Interner) -> ConstTree {
|
||||
// ConstTree::tree([])
|
||||
ConstTree::namespace(
|
||||
[i.i("reflect")],
|
||||
ConstTree::tree([(
|
||||
i.i("ref_equal"),
|
||||
ConstTree::xfn(xfn_2ary(|l: RefEqual, r: RefEqual| Ok(l.id() == r.id()))),
|
||||
)]),
|
||||
)
|
||||
}
|
||||
@@ -10,6 +10,8 @@ use super::exit_status::exit_status;
|
||||
use super::inspect::inspect;
|
||||
use super::number::num;
|
||||
use super::panic::panic;
|
||||
use super::protocol::{parsers, protocol_lib};
|
||||
use super::reflect::reflect;
|
||||
use super::state::{state_handlers, state_lib};
|
||||
use super::string::str;
|
||||
use crate::facade::{IntoSystem, System};
|
||||
@@ -40,8 +42,10 @@ impl IntoSystem<'static> for StlConfig {
|
||||
+ exit_status(i)
|
||||
+ num(i)
|
||||
+ panic(i)
|
||||
+ reflect(i)
|
||||
+ state_lib(i)
|
||||
+ str(i);
|
||||
+ str(i)
|
||||
+ protocol_lib(i);
|
||||
let mk_impure_fns = || inspect(i);
|
||||
let fns = if self.impure { pure_tree + mk_impure_fns() } else { pure_tree };
|
||||
System {
|
||||
@@ -57,8 +61,8 @@ impl IntoSystem<'static> for StlConfig {
|
||||
}]),
|
||||
}],
|
||||
handlers: state_handlers(),
|
||||
lexer_plugin: None,
|
||||
line_parser: None,
|
||||
lexer_plugins: vec![],
|
||||
line_parsers: parsers(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,84 @@
|
||||
import super::(known::*, bool::*, number::*)
|
||||
import super::(known::*, bool::*, number::*, match, macro)
|
||||
|
||||
const discard_args := \n. \value. (
|
||||
export type ty (
|
||||
import super::super::(number::*, bool::*, macro, panic)
|
||||
|
||||
const discard_args := \n. \value. (
|
||||
if n == 0 then value
|
||||
else \_. discard_args (n - 1) value
|
||||
)
|
||||
|
||||
macro gen_call macro::list_end =0x1p254=> \f.f
|
||||
macro gen_call ( macro::list_item $item $tail ) =0x1p254=> \f. (gen_call $tail) (f $item)
|
||||
export macro new ( $list ) =0x1p84=> wrap \f. (gen_call $list) (f (macro::length $list))
|
||||
|
||||
export const pick := \tuple. \i. (unwrap tuple) ( \size.
|
||||
if size <= i then panic "Tuple index out of bounds"
|
||||
else discard_args i \val. discard_args (size - 1 - i) val
|
||||
)
|
||||
|
||||
export const length := \tuple. (unwrap tuple) \size. discard_args size size
|
||||
|
||||
export const apply := \tuple. \f. (unwrap tuple) f
|
||||
)
|
||||
|
||||
export const pick := \tuple. \i. \n. tuple (
|
||||
discard_args i \val. discard_args (n - 1 - i) val
|
||||
const pick := ty::pick
|
||||
const length := ty::length
|
||||
const apply := ty::apply
|
||||
|
||||
macro t[..$items] =0x2p84=> ( ty::new ( macro::comma_list (..$items) ) )
|
||||
|
||||
export ::(t, size)
|
||||
|
||||
macro size ( t[..$items] ) =0x1p230=> macro::length macro::comma_list (..$items)
|
||||
|
||||
--[
|
||||
request l -> pattern_walker l
|
||||
pattern_walker end -> pattern_result
|
||||
pattern_walker h ++ t -> await_pattern
|
||||
await_pattern -> pattern_result
|
||||
]--
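--[
  e.g. for `t[x, y]` the walker requests the sub-patterns `x` and `y` in turn;
  the final response first checks `length match::value == 2`, then binds each
  name to `pick match::value <index>` before running the handler.
]--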
|
||||
|
||||
( macro match::request ( t[ ..$items ] )
|
||||
=0x1p230=> tuple_pattern
|
||||
( macro::length macro::comma_list ( ..$items ) )
|
||||
(
|
||||
pattern_walker
|
||||
(0) -- index of next item
|
||||
macro::comma_list ( ..$items ) -- leftover items
|
||||
)
|
||||
)
|
||||
( macro tuple_pattern $length ( pattern_result $expr ( $binds ) )
|
||||
=0x1p254=> match::response (
|
||||
if length match::value == $length
|
||||
then $expr
|
||||
else match::fail
|
||||
) ( $binds )
|
||||
)
|
||||
( macro pattern_walker $length macro::list_end
|
||||
=0x1p254=> pattern_result match::pass ( match::no_binds )
|
||||
)
|
||||
( macro pattern_walker (...$length) ( macro::list_item $next $tail )
|
||||
=0x1p254=> pattern_await
|
||||
(...$length)
|
||||
( match::request $next )
|
||||
( pattern_walker (...$length + 1) $tail )
|
||||
)
|
||||
( macro pattern_await $length
|
||||
( match::response $expr ( $binds ) )
|
||||
( pattern_result $tail_expr ( $tail_binds ) )
|
||||
=0x1p254=>
|
||||
pattern_result
|
||||
(
|
||||
(\match::pass. (\match::value. $expr) (pick match::value $length)) (
|
||||
match::take_binds $binds (
|
||||
(\match::pass. $tail_expr) ( match::take_binds $tail_binds (
|
||||
match::give_binds
|
||||
match::chain_binds $binds $tail_binds
|
||||
match::pass
|
||||
))
|
||||
)
|
||||
)
|
||||
)
|
||||
( match::chain_binds $binds $tail_binds )
|
||||
)
|
||||
|
||||
macro t[...$item, ...$rest:1] =0x2p84=> (\f. t[...$rest] (f (...$item)))
|
||||
macro t[...$end] =0x1p84=> (\f. f (...$end))
|
||||
macro t[] =0x1p84=> \f.f
|
||||
|
||||
export ::(t)
|
||||
|
||||