October commit

- custom parser support and infra
- type-tagging and traits (untested)
- match expressions
.vscode/launch.json (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
+{
+  // Use IntelliSense to learn about possible attributes.
+  // Hover to view descriptions of existing attributes.
+  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+  "version": "0.2.0",
+  "configurations": [
+    {
+      "type": "lldb",
+      "request": "launch",
+      "name": "Debug",
+      "program": "${workspaceFolder}/<executable file>",
+      "args": [],
+      "cwd": "${workspaceFolder}"
+    }
+  ]
+}
@@ -5,7 +5,7 @@ const folder_view := (path, next) => do{
   cps println $ "Contents of " ++ fs::os_print path;
   cps entries = async::block_on $ fs::read_dir path;
   cps list::enumerate entries
-    |> list::map (pass \id. pass \name. \is_dir.
+    |> list::map ((t[id, t[name, is_dir]]) =>
       println $ to_string id ++ ": " ++ fs::os_print name ++ if is_dir then "/" else ""
     )
     |> list::chain;
@@ -17,7 +17,7 @@ const folder_view := (path, next) => do{
     |> tuple::pick 0 2;
   next parent_path
 } else do {
-  cps subname, is_dir = to_uint choice
+  let t[subname, is_dir] = to_uint choice
     |> (list::get entries)
     |> option::unwrap;
   let subpath = fs::join_paths path subname;

examples/match/main.orc (new file, 21 lines)
@@ -0,0 +1,21 @@
+import std::to_string
+
+const foo := t[option::some "world!", option::none]
+
+const test1 := match foo {
+  t[option::some balh, option::none] => balh;
+}
+
+const bar := map::new[
+  "age" = 22,
+  "name" = "lbfalvy",
+  "is_alive" = true,
+  "species" = "human",
+  "greeting" = "Hello"
+]
+
+const test2 := match bar {
+  map::having ["is_alive" = true, "greeting" = foo] => foo
+}
+
+const main := test2 ++ ", " ++ test1
@@ -30,7 +30,7 @@ struct Args {
   pub macro_limit: usize,
   /// Print the parsed ruleset and exit
   #[arg(long)]
-  pub dump_repo: bool,
+  pub list_macros: bool,
   /// Step through the macro execution process in the specified symbol
   #[arg(long, default_value = "")]
   pub macro_debug: String,
@@ -112,6 +112,16 @@ pub fn macro_debug(premacro: PreMacro, sym: Sym) -> ExitCode {
     "p" | "print" => print_for_debug(&code),
     "d" | "dump" => print!("Rules: {}", premacro.repo),
     "q" | "quit" => return ExitCode::SUCCESS,
+    "complete" => {
+      if let Some((idx, c)) = steps.last() {
+        code = c;
+        print!("Step {idx}: ");
+        print_for_debug(&code);
+      } else {
+        print!("Already halted")
+      }
+      return ExitCode::SUCCESS;
+    },
     "h" | "help" => print!(
 "Available commands:
 \t<blank>, n, next\t\ttake a step
@@ -148,7 +158,7 @@ pub fn main() -> ExitCode {
     .add_system(io::Service::new(scheduler.clone(), std_streams))
     .add_system(directfs::DirectFS::new(scheduler));
   let premacro = env.load_dir(&dir, &main).unwrap();
-  if args.dump_repo {
+  if args.list_macros {
     println!("Parsed rules: {}", premacro.repo);
     return ExitCode::SUCCESS;
   }
@@ -1,7 +1,7 @@
 use std::fmt::Display;
-use std::rc::Rc;
+use std::sync::Arc;
 
-use crate::foreign::ExternError;
+use crate::foreign::{ExternError, XfnResult};
 use crate::Location;
 
 /// Some expectation (usually about the argument types of a function) did not
@@ -15,15 +15,15 @@ pub struct AssertionError {
 impl AssertionError {
   /// Construct, upcast and wrap in a Result that never succeeds for easy
   /// short-circuiting
-  pub fn fail<T>(
-    location: Location,
-    message: &'static str,
-  ) -> Result<T, Rc<dyn ExternError>> {
+  pub fn fail<T>(location: Location, message: &'static str) -> XfnResult<T> {
     Err(Self::ext(location, message))
   }
 
   /// Construct and upcast to [ExternError]
-  pub fn ext(location: Location, message: &'static str) -> Rc<dyn ExternError> {
+  pub fn ext(
+    location: Location,
+    message: &'static str,
+  ) -> Arc<dyn ExternError> {
     Self { location, message }.into_extern()
   }
 }
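A usage sketch, not part of this commit: the helper below and its call site are made up, but it shows how the flattened `fail` signature short-circuits an extern function through the Arc-backed `XfnResult` alias (crate-internal paths assumed).

    use crate::error::AssertionError;
    use crate::foreign::XfnResult;
    use crate::Location;

    // Hypothetical extern helper: reject negative input with an Arc-backed error.
    fn expect_positive(n: i64, loc: Location) -> XfnResult<u64> {
      if n < 0 {
        // `fail` constructs the error, upcasts it and wraps it in Err in one call.
        return AssertionError::fail(loc, "expected a positive number");
      }
      Ok(n as u64)
    }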
@@ -4,7 +4,6 @@ mod conflicting_roles;
 mod import_all;
 mod no_targets;
 mod not_exported;
-mod parse_error_with_tokens;
 mod project_error;
 mod runtime_error;
 mod too_many_supers;
@@ -16,7 +15,6 @@ pub use conflicting_roles::ConflictingRoles;
 pub use import_all::ImportAll;
 pub use no_targets::NoTargets;
 pub use not_exported::NotExported;
-pub use parse_error_with_tokens::ParseErrorWithTokens;
 pub use project_error::{ErrorPosition, ProjectError, ProjectResult};
 pub use runtime_error::RuntimeError;
 pub use too_many_supers::TooManySupers;
@@ -1,28 +0,0 @@
-use std::rc::Rc;
-
-use itertools::Itertools;
-
-use super::{ErrorPosition, ProjectError};
-use crate::parse::Entry;
-use crate::utils::BoxedIter;
-
-/// Produced by stages that parse text when it fails.
-pub struct ParseErrorWithTokens {
-  /// The complete source of the faulty file
-  pub full_source: String,
-  /// Tokens, if the error did not occur during tokenization
-  pub tokens: Vec<Entry>,
-  /// The parse error produced by Chumsky
-  pub error: Rc<dyn ProjectError>,
-}
-impl ProjectError for ParseErrorWithTokens {
-  fn description(&self) -> &str { self.error.description() }
-  fn message(&self) -> String {
-    format!(
-      "Failed to parse code: {}\nTokenized source for context:\n{}",
-      self.error.message(),
-      self.tokens.iter().map(|t| t.to_string()).join(" "),
-    )
-  }
-  fn positions(&self) -> BoxedIter<ErrorPosition> { self.error.positions() }
-}
@@ -1,7 +1,7 @@
 use std::fmt::Display;
-use std::rc::Rc;
+use std::sync::Arc;
 
-use crate::foreign::ExternError;
+use crate::foreign::{ExternError, XfnResult};
 
 /// Some external event prevented the operation from succeeding
 #[derive(Clone)]
@@ -13,15 +13,15 @@ pub struct RuntimeError {
 impl RuntimeError {
   /// Construct, upcast and wrap in a Result that never succeeds for easy
   /// short-circuiting
-  pub fn fail<T>(
-    message: String,
-    operation: &'static str,
-  ) -> Result<T, Rc<dyn ExternError>> {
+  pub fn fail<T>(message: String, operation: &'static str) -> XfnResult<T> {
     Err(Self { message, operation }.into_extern())
   }
 
   /// Construct and upcast to [ExternError]
-  pub fn ext(message: String, operation: &'static str) -> Rc<dyn ExternError> {
+  pub fn ext(
+    message: String,
+    operation: &'static str,
+  ) -> Arc<dyn ExternError> {
     Self { message, operation }.into_extern()
   }
 }
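A similar hypothetical call site for `RuntimeError` (assumed, not from this diff): an external operation failing is reported through the same Arc-based channel via `ext`.

    use crate::error::RuntimeError;
    use crate::foreign::XfnResult;

    // Hypothetical: surface a failed external operation from an extern function.
    fn parse_port(text: String) -> XfnResult<u16> {
      text
        .parse::<u16>()
        .map_err(|e| RuntimeError::ext(e.to_string(), "parsing a port number"))
    }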
@@ -46,10 +46,12 @@ impl<'a> Environment<'a> {
     let mut line_parsers = vec![];
     let mut prelude = vec![];
     for sys in systems.iter() {
+      lexer_plugins.extend(sys.lexer_plugins.iter().map(|b| &**b));
+      line_parsers.extend(sys.line_parsers.iter().map(|b| &**b));
       if !sys.code.is_empty() {
         tree = parse_layer(
           sys.code.keys().map(|sym| &sym[..]),
-          &|k| sys.load_file(k),
+          &|k, referrer| sys.load_file(k, referrer),
           &tree,
           &prelude,
           &lexer_plugins,
@@ -57,8 +59,6 @@ impl<'a> Environment<'a> {
           i,
         )?;
       }
-      lexer_plugins.extend(sys.lexer_plugin.as_deref().iter());
-      line_parsers.extend(sys.line_parser.as_deref().iter());
       prelude.extend_from_slice(&sys.prelude);
     }
     Ok(CompiledEnv { prelude, tree, systems })
@@ -74,14 +74,14 @@ impl<'a> Environment<'a> {
     let CompiledEnv { prelude, systems, tree } = self.compile()?;
     let file_cache = file_loader::mk_dir_cache(dir.to_path_buf());
     let lexer_plugins = (systems.iter())
-      .filter_map(|s| s.lexer_plugin.as_deref())
+      .flat_map(|s| s.lexer_plugins.iter().map(|b| &**b))
       .collect::<Vec<_>>();
     let line_parsers = (systems.iter())
-      .filter_map(|s| s.line_parser.as_deref())
+      .flat_map(|s| s.line_parsers.iter().map(|b| &**b))
       .collect::<Vec<_>>();
     let vname_tree = parse_layer(
       iter::once(target),
-      &|path| file_cache.find(path),
+      &|path, _| file_cache.find(path),
       &tree,
       &prelude,
       &lexer_plugins,
@@ -86,7 +86,8 @@ impl<'a> PreMacro<'a> {
       } else {
         repo.pass(source).unwrap_or_else(|| source.clone())
       };
-      let runtree = ast_to_interpreted(&unmatched).map_err(|e| e.rc())?;
+      let runtree =
+        ast_to_interpreted(&unmatched, name.clone()).map_err(|e| e.rc())?;
       symbols.insert(name.clone(), runtree);
     }
     Ok(Process {
@@ -27,10 +27,10 @@ pub struct System<'a> {
   /// Custom lexer for the source code representation atomic data.
   /// These take priority over builtin lexers so the syntax they
   /// match should be unambiguous
-  pub lexer_plugin: Option<Box<dyn LexerPlugin>>,
+  pub lexer_plugins: Vec<Box<dyn LexerPlugin>>,
   /// Parser that processes custom line types into their representation in the
   /// module tree
-  pub line_parser: Option<Box<dyn LineParser>>,
+  pub line_parsers: Vec<Box<dyn LineParser>>,
 }
 impl<'a> System<'a> {
   /// Intern the name of the system so that it can be used as an Orchid
@@ -41,10 +41,17 @@ impl<'a> System<'a> {
   }
 
   /// Load a file from the system
-  pub fn load_file(&self, path: &[Tok<String>]) -> IOResult {
+  pub fn load_file(
+    &self,
+    path: &[Tok<String>],
+    referrer: &[Tok<String>],
+  ) -> IOResult {
     (self.code.get(path)).cloned().ok_or_else(|| {
-      let err =
-        MissingSystemCode { path: path.to_vec(), system: self.name.clone() };
+      let err = MissingSystemCode {
+        path: path.to_vec(),
+        system: self.name.clone(),
+        referrer: referrer.to_vec(),
+      };
       err.rc()
     })
   }
@@ -56,6 +63,7 @@ impl<'a> System<'a> {
 pub struct MissingSystemCode {
   path: VName,
   system: Vec<String>,
+  referrer: VName,
 }
 impl ProjectError for MissingSystemCode {
   fn description(&self) -> &str {
@@ -63,8 +71,9 @@ impl ProjectError for MissingSystemCode {
   }
   fn message(&self) -> String {
     format!(
-      "Path {} is not defined by {} or any system before it",
+      "Path {} imported by {} is not defined by {} or any system before it",
       Interner::extern_all(&self.path).join("::"),
+      Interner::extern_all(&self.referrer).join("::"),
       self.system.join("::")
     )
   }
@@ -1,16 +1,16 @@
 use std::any::Any;
 use std::fmt::Debug;
-use std::rc::Rc;
 
 use dyn_clone::DynClone;
 
-use super::ExternError;
+use super::XfnResult;
 use crate::ddispatch::request;
 use crate::error::AssertionError;
 use crate::interpreted::{ExprInst, TryFromExprInst};
 use crate::interpreter::{Context, RuntimeError};
 use crate::representations::interpreted::Clause;
 use crate::utils::ddispatch::Responder;
+use crate::{ast, NameLike};
 
 /// Information returned by [Atomic::run]. This mirrors
 /// [crate::interpreter::Return] but with a clause instead of an Expr.
@@ -77,6 +77,24 @@
   {
     self.atom_cls().wrap()
   }
+
+  /// Wrap the atom in a clause to be placed in a [sourcefile::FileEntry].
+  #[must_use]
+  fn ast_cls<N: NameLike>(self) -> ast::Clause<N>
+  where
+    Self: Sized,
+  {
+    ast::Clause::Atom(Atom::new(self))
+  }
+
+  /// Wrap the atom in an expression to be placed in a [sourcefile::FileEntry].
+  #[must_use]
+  fn ast_exp<N: NameLike>(self) -> ast::Expr<N>
+  where
+    Self: Sized,
+  {
+    self.ast_cls().into_expr()
+  }
 }
 
 /// Represents a black box unit of code with its own normalization steps.
@@ -129,7 +147,7 @@ impl Debug for Atom {
 }
 
 impl TryFromExprInst for Atom {
-  fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
+  fn from_exi(exi: ExprInst) -> XfnResult<Self> {
     let loc = exi.location();
     match exi.expr_val().clause {
       Clause::Atom(a) => Ok(a),
@@ -1,7 +1,7 @@
 use std::error::Error;
 use std::fmt::{Debug, Display};
 use std::hash::Hash;
-use std::rc::Rc;
+use std::sync::Arc;
 
 use dyn_clone::{clone_box, DynClone};
 
@@ -9,16 +9,17 @@ use super::XfnResult;
 use crate::interpreted::ExprInst;
 use crate::interpreter::Context;
 use crate::representations::interpreted::Clause;
+use crate::{ast, NameLike};
 
 /// Errors produced by external code
-pub trait ExternError: Display {
+pub trait ExternError: Display + Send + Sync + DynClone {
   /// Convert into trait object
   #[must_use]
-  fn into_extern(self) -> Rc<dyn ExternError>
+  fn into_extern(self) -> Arc<dyn ExternError>
   where
     Self: 'static + Sized,
   {
-    Rc::new(self)
+    Arc::new(self)
   }
 }
 
@@ -51,6 +52,14 @@ pub trait ExternFn: DynClone + Send {
   {
     Clause::ExternFn(ExFn(Box::new(self)))
   }
+  /// Wrap this function in a clause to be placed in a [FileEntry].
+  #[must_use]
+  fn xfn_ast_cls<N: NameLike>(self) -> ast::Clause<N>
+  where
+    Self: Sized + 'static,
+  {
+    ast::Clause::ExternFn(ExFn(Box::new(self)))
+  }
 }
 
 impl Eq for dyn ExternFn {}
@@ -1,6 +1,6 @@
 use std::fmt::Debug;
 use std::marker::PhantomData;
-use std::rc::Rc;
+use std::sync::Arc;
 
 use super::atom::StrictEq;
 use super::{
@@ -61,7 +61,7 @@ impl<T, U, F> Param<T, U, F> {
   /// Wrap a new function in a parametric struct
   pub fn new(f: F) -> Self
   where
-    F: FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
+    F: FnOnce(T) -> Result<U, Arc<dyn ExternError>>,
   {
     Self { data: f, _t: PhantomData, _u: PhantomData }
   }
@@ -77,7 +77,7 @@ impl<T, U, F: Clone> Clone for Param<T, U, F> {
 impl<
   T: 'static + TryFromExprInst,
   U: 'static + ToClause,
-  F: 'static + Clone + Send + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
+  F: 'static + Clone + Send + FnOnce(T) -> Result<U, Arc<dyn ExternError>>,
 > ToClause for Param<T, U, F>
 {
   fn to_clause(self) -> Clause { self.xfn_cls() }
@@ -109,7 +109,7 @@ impl<T, U, F> Responder for FnMiddleStage<T, U, F> {}
 impl<
   T: 'static + TryFromExprInst,
   U: 'static + ToClause,
-  F: 'static + Clone + FnOnce(T) -> Result<U, Rc<dyn ExternError>> + Send,
+  F: 'static + Clone + FnOnce(T) -> Result<U, Arc<dyn ExternError>> + Send,
 > Atomic for FnMiddleStage<T, U, F>
 {
   fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
@@ -127,7 +127,7 @@ impl<
 impl<
   T: 'static + TryFromExprInst,
   U: 'static + ToClause,
-  F: 'static + Clone + Send + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
+  F: 'static + Clone + Send + FnOnce(T) -> Result<U, Arc<dyn ExternError>>,
 > ExternFn for Param<T, U, F>
 {
   fn name(&self) -> &str { "anonymous Rust function" }
@@ -137,8 +137,9 @@ impl<
 }
 
 pub mod constructors {
-  use std::rc::Rc;
+
+  use std::sync::Arc;
 
   use super::{Param, ToClause};
   use crate::foreign::{ExternError, ExternFn};
   use crate::interpreted::TryFromExprInst;
@@ -163,7 +164,7 @@ pub mod constructors {
     TLast: TryFromExprInst + 'static,
     TReturn: ToClause + Send + 'static,
     TFunction: FnOnce( $( $t , )* TLast )
-      -> Result<TReturn, Rc<dyn ExternError>> + Clone + Send + 'static
+      -> Result<TReturn, Arc<dyn ExternError>> + Clone + Send + 'static
   >(function: TFunction) -> impl ExternFn {
     xfn_variant!(@BODY_LOOP function
       ( $( ( $t [< $t:lower >] ) )* )
@@ -1,11 +1,10 @@
 use std::any::Any;
 use std::fmt::Debug;
-use std::rc::Rc;
 
 use ordered_float::NotNan;
 
 use super::atom::StrictEq;
-use super::{AtomicResult, AtomicReturn, ExternError};
+use super::{AtomicResult, AtomicReturn, XfnResult};
 use crate::error::AssertionError;
 #[allow(unused)] // for doc
 // use crate::define_fn;
@@ -62,7 +61,7 @@ impl<T: InertAtomic> Atomic for T {
 }
 
 impl<T: InertAtomic> TryFromExprInst for T {
-  fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
+  fn from_exi(exi: ExprInst) -> XfnResult<Self> {
     let Expr { clause, location } = exi.expr_val();
     match clause {
       Clause::Atom(a) => match a.0.as_any().downcast() {
@@ -8,7 +8,7 @@ mod extern_fn;
 mod fn_bridge;
 mod inert;
 
-use std::rc::Rc;
+use std::sync::Arc;
 
 pub use atom::{Atom, Atomic, AtomicResult, AtomicReturn, StrictEq};
 pub use extern_fn::{ExFn, ExternError, ExternFn};
@@ -22,4 +22,4 @@ pub use inert::InertAtomic;
 pub use crate::representations::interpreted::Clause;
 
 /// Return type of the argument to the [xfn_1ary] family of functions
-pub type XfnResult<T> = Result<T, Rc<dyn ExternError>>;
+pub type XfnResult<T> = Result<T, Arc<dyn ExternError>>;
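Because `ExternError` now requires `Send + Sync` and the alias wraps it in `Arc`, error values can cross thread boundaries. A minimal compile-time check, illustrative only and not part of the commit:

    use std::sync::Arc;

    use crate::foreign::ExternError;

    // If this compiles, boxed extern errors are thread-safe handles.
    fn assert_thread_safe<T: Send + Sync>() {}
    fn _check() { assert_thread_safe::<Arc<dyn ExternError>>(); }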
@@ -1,22 +1,22 @@
-use std::fmt::Display;
-use std::rc::Rc;
+use std::fmt::{Debug, Display};
+use std::sync::Arc;
 
 use crate::foreign::ExternError;
 use crate::{Location, Sym};
 
 /// Problems in the process of execution
-#[derive(Clone, Debug)]
+#[derive(Debug, Clone)]
 pub enum RuntimeError {
   /// A Rust function encountered an error
-  Extern(Rc<dyn ExternError>),
+  Extern(Arc<dyn ExternError>),
   /// Primitive applied as function
   NonFunctionApplication(Location),
   /// Symbol not in context
   MissingSymbol(Sym, Location),
 }
 
-impl From<Rc<dyn ExternError>> for RuntimeError {
-  fn from(value: Rc<dyn ExternError>) -> Self { Self::Extern(value) }
+impl From<Arc<dyn ExternError>> for RuntimeError {
+  fn from(value: Arc<dyn ExternError>) -> Self { Self::Extern(value) }
 }
 
 impl Display for RuntimeError {
@@ -1,5 +1,5 @@
 use std::any::{Any, TypeId};
-use std::rc::Rc;
+use std::sync::Arc;
 
 use hashbrown::HashMap;
 use trait_set::trait_set;
@@ -58,7 +58,7 @@ impl<'a> HandlerTable<'a> {
 
 /// Various possible outcomes of a [Handler] execution. Ok returns control to
 /// the interpreter. The meaning of Err is decided by the value in it.
-pub type HandlerRes = Result<ExprInst, Rc<dyn ExternError>>;
+pub type HandlerRes = Result<ExprInst, Arc<dyn ExternError>>;
 
 /// [run] orchid code, executing any commands it returns using the specified
 /// [Handler]s.
@@ -12,7 +12,7 @@ pub mod facade;
 pub mod foreign;
 pub mod interner;
 pub mod interpreter;
-mod parse;
+pub mod parse;
 pub mod pipeline;
 mod representations;
 pub mod rule;
@@ -13,16 +13,27 @@ use crate::{Location, VName};
 /// Hiding type parameters in associated types allows for simpler
 /// parser definitions
 pub trait Context {
+  /// Get the path to the current file
   #[must_use]
   fn file(&self) -> Arc<VName>;
+  /// Get a reference to the interner
   #[must_use]
   fn interner(&self) -> &Interner;
+  /// Get a reference to the full source text for position math and to build
+  /// [Location]s.
   #[must_use]
   fn source(&self) -> Arc<String>;
+  /// Get the list of all lexer plugins
+  #[must_use]
   fn lexers(&self) -> &[&dyn LexerPlugin];
+  /// Get the list of all parser plugins
+  #[must_use]
   fn line_parsers(&self) -> &[&dyn LineParser];
+  /// Find our position in the text given the text we've yet to parse
   #[must_use]
   fn pos(&self, tail: &str) -> usize { self.source().len() - tail.len() }
+  /// Generate a location given the length of a token and the unparsed text
+  /// after it. See also [Context::range_loc] if the maths gets complex.
   #[must_use]
   fn location(&self, len: usize, tail: &str) -> Location {
     match self.pos(tail).checked_sub(len) {
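A quick worked example of the position math documented above; the source text and numbers are made up for illustration.

    // pos(tail) = source.len() - tail.len()
    // source = "let x := 42;"   (12 bytes)
    // tail   = " 42;"           (4 bytes still unparsed)
    // pos(tail)         == 12 - 4 == 8
    // location(2, tail) -> Location::Range over 6..8, i.e. the ":=" token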
@@ -33,14 +44,35 @@ pub trait Context {
       },
     }
   }
+  /// Generate a location given a range in the source file. The location can be
+  /// computed with [Context::pos]. See also [Context::location].
   #[must_use]
   fn range_loc(&self, range: Range<usize>) -> Location {
     Location::Range { file: self.file(), range, source: self.source() }
   }
 }
 
+impl<C: Context + ?Sized> Context for &C {
+  fn file(&self) -> Arc<VName> { (*self).file() }
+  fn interner(&self) -> &Interner { (*self).interner() }
+  fn lexers(&self) -> &[&dyn LexerPlugin] { (*self).lexers() }
+  fn line_parsers(&self) -> &[&dyn LineParser] { (*self).line_parsers() }
+  fn location(&self, len: usize, tail: &str) -> Location {
+    (*self).location(len, tail)
+  }
+  fn pos(&self, tail: &str) -> usize { (*self).pos(tail) }
+  fn range_loc(&self, range: Range<usize>) -> Location {
+    (*self).range_loc(range)
+  }
+  fn source(&self) -> Arc<String> { (*self).source() }
+}
+
+/// Return value of a lexer plugin; the parsed data and the remaining string
 pub type LexerPluginOut<'a> = Option<ProjectResult<(Atom, &'a str)>>;
+/// Return value of a line parser; the meaningful lines derived from this parser
 pub type LineParserOut = Option<ProjectResult<Vec<FileEntryKind>>>;
 
+/// A plugin callback that reads a custom lexeme.
 pub trait LexerPlugin:
   for<'a> Fn(&'a str, &dyn Context) -> LexerPluginOut<'a> + Sync + Send
 {
@@ -50,12 +82,17 @@ impl<F> LexerPlugin for F where
 {
 }
 
+/// A plugin callback that parses a custom file entry
 pub trait LineParser:
-  Fn(Stream<'_>, &dyn Context) -> LineParserOut + Sync + Send
+  for<'a> Fn(Stream<'_>, &'a (dyn Context + 'a)) -> LineParserOut
+  + Sync
+  + Send
 {
 }
 impl<F> LineParser for F where
-  F: Fn(Stream<'_>, &dyn Context) -> LineParserOut + Sync + Send
+  F: for<'a> Fn(Stream<'_>, &'a (dyn Context + 'a)) -> LineParserOut
+    + Sync
+    + Send
 {
 }
 
@@ -64,22 +101,23 @@ impl<F> LineParser for F where
 /// Hiding type parameters in associated types allows for simpler
 /// parser definitions
 pub struct ParsingContext<'a> {
-  pub interner: &'a Interner,
-  pub file: Arc<VName>,
-  pub source: Arc<String>,
-  pub lexers: &'a [&'a dyn LexerPlugin],
-  pub line_parsers: &'a [&'a dyn LineParser],
+  interner: &'a Interner,
+  file_path: Arc<VName>,
+  source: Arc<String>,
+  lexers: &'a [&'a dyn LexerPlugin],
+  line_parsers: &'a [&'a dyn LineParser],
 }
 
 impl<'a> ParsingContext<'a> {
+  /// Create a new parsing context
   pub fn new(
     interner: &'a Interner,
-    file: Arc<VName>,
+    file_path: Arc<VName>,
     source: Arc<String>,
     lexers: &'a [&'a dyn LexerPlugin],
     line_parsers: &'a [&'a dyn LineParser],
   ) -> Self {
-    Self { interner, file, source, lexers, line_parsers }
+    Self { interner, file_path, source, lexers, line_parsers }
   }
 }
@@ -87,7 +125,7 @@ impl<'a> Clone for ParsingContext<'a> {
   fn clone(&self) -> Self {
     Self {
       interner: self.interner,
-      file: self.file.clone(),
+      file_path: self.file_path.clone(),
       source: self.source.clone(),
       lexers: self.lexers,
       line_parsers: self.line_parsers,
@@ -97,7 +135,7 @@ impl<'a> Clone for ParsingContext<'a> {
 
 impl Context for ParsingContext<'_> {
   fn interner(&self) -> &Interner { self.interner }
-  fn file(&self) -> Arc<VName> { self.file.clone() }
+  fn file(&self) -> Arc<VName> { self.file_path.clone() }
   fn source(&self) -> Arc<String> { self.source.clone() }
   fn lexers(&self) -> &[&dyn LexerPlugin] { self.lexers }
   fn line_parsers(&self) -> &[&dyn LineParser] { self.line_parsers }
@@ -116,3 +154,23 @@ impl<'a> Context for MockContext<'a> {
   fn range_loc(&self, _: Range<usize>) -> Location { Location::Unknown }
   fn source(&self) -> Arc<String> { Arc::new(String::new()) }
 }
+
+pub struct FlatLocContext<'a, C: Context + ?Sized> {
+  sub: &'a C,
+  location: &'a Location,
+}
+impl<'a, C: Context + ?Sized> FlatLocContext<'a, C> {
+  pub fn new(sub: &'a C, location: &'a Location) -> Self {
+    Self { sub, location }
+  }
+}
+impl<'a, C: Context + ?Sized> Context for FlatLocContext<'a, C> {
+  fn interner(&self) -> &Interner { self.sub.interner() }
+  fn pos(&self, _: &str) -> usize { 0 }
+  fn file(&self) -> Arc<VName> { self.sub.file() }
+  fn lexers(&self) -> &[&dyn LexerPlugin] { self.sub.lexers() }
+  fn line_parsers(&self) -> &[&dyn LineParser] { self.sub.line_parsers() }
+  fn source(&self) -> Arc<String> { self.sub.source() }
+  fn location(&self, _: usize, _: &str) -> Location { self.location.clone() }
+  fn range_loc(&self, _: Range<usize>) -> Location { self.location.clone() }
+}
@@ -1,13 +1,19 @@
+//! Errors produced by the parser. Plugins are encouraged to reuse these where
+//! applicable.
+
 use std::rc::Rc;
 
 use itertools::Itertools;
 
-use super::{Entry, Lexeme};
-use crate::error::ProjectError;
+use super::{Entry, Lexeme, Stream};
+use crate::ast::PType;
+use crate::error::{ProjectError, ProjectResult};
 use crate::{Location, Tok};
 
+/// A line does not begin with an identifying keyword
 #[derive(Debug)]
 pub struct LineNeedsPrefix {
+  /// Erroneous line starter
   pub entry: Entry,
 }
 impl ProjectError for LineNeedsPrefix {
@@ -18,6 +24,7 @@ impl ProjectError for LineNeedsPrefix {
   }
 }
 
+/// The line ends abruptly
 #[derive(Debug)]
 pub struct UnexpectedEOL {
   /// Last entry before EOL
@@ -33,7 +40,9 @@ impl ProjectError for UnexpectedEOL {
   }
 }
 
+/// The line should have ended
 pub struct ExpectedEOL {
+  /// Location of the last valid or first excessive token
   pub location: Location,
 }
 impl ProjectError for ExpectedEOL {
@@ -41,11 +50,14 @@ impl ProjectError for ExpectedEOL {
   fn one_position(&self) -> Location { self.location.clone() }
 }
 
+/// A name was expected
 #[derive(Debug)]
 pub struct ExpectedName {
+  /// Non-name entry
   pub entry: Entry,
 }
 impl ExpectedName {
+  /// If the entry is a name, return its text. If it's not, produce this error.
   pub fn expect(entry: &Entry) -> Result<Tok<String>, Rc<dyn ProjectError>> {
     match &entry.lexeme {
       Lexeme::Name(n) => Ok(n.clone()),
@@ -57,24 +69,22 @@ impl ProjectError for ExpectedName {
   fn description(&self) -> &str { "A name was expected" }
   fn one_position(&self) -> Location { self.entry.location() }
   fn message(&self) -> String {
-    if self.entry.is_keyword() {
-      format!(
-        "{} is a restricted keyword and cannot be used as a name",
-        self.entry
-      )
-    } else {
-      format!("Expected a name, found {}", self.entry)
-    }
+    format!("Expected a name, found {}", self.entry)
   }
 }
 
+/// A specific lexeme was expected
 #[derive()]
 pub struct Expected {
+  /// The lexemes that would have been acceptable
   pub expected: Vec<Lexeme>,
+  /// Whether a name would also have been acceptable (multiname)
   pub or_name: bool,
+  /// What was actually found
   pub found: Entry,
 }
 impl Expected {
+  /// Assert that the entry contains exactly the specified lexeme
   pub fn expect(l: Lexeme, e: &Entry) -> Result<(), Rc<dyn ProjectError>> {
     if e.lexeme.strict_eq(&l) {
       return Ok(());
@@ -99,7 +109,9 @@ impl ProjectError for Expected {
   }
 }
 
+/// A token reserved for future use was found in the code
 pub struct ReservedToken {
+  /// The offending token
   pub entry: Entry,
 }
 impl ProjectError for ReservedToken {
@@ -108,8 +120,11 @@ impl ProjectError for ReservedToken {
   fn message(&self) -> String { format!("{} is a reserved token", self.entry) }
 }
 
+/// A token was found where it doesn't belong
 pub struct BadTokenInRegion {
+  /// What was found
   pub entry: Entry,
+  /// Human-readable name of the region where it should not appear
   pub region: &'static str,
 }
 impl ProjectError for BadTokenInRegion {
@@ -120,8 +135,11 @@ impl ProjectError for BadTokenInRegion {
   }
 }
 
+/// A specific lexeme was searched but not found
 pub struct NotFound {
+  /// Human-readable description of what was searched
   pub expected: &'static str,
+  /// Area covered by the search
   pub location: Location,
 }
 impl ProjectError for NotFound {
@@ -130,12 +148,14 @@ impl ProjectError for NotFound {
   fn message(&self) -> String { format!("{} was expected", self.expected) }
 }
 
+/// :: found on its own somewhere other than a general export
 pub struct LeadingNS(pub Location);
 impl ProjectError for LeadingNS {
   fn description(&self) -> &str { ":: can only follow a name token" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// Parens don't pair up
 pub struct MisalignedParen(pub Entry);
 impl ProjectError for MisalignedParen {
   fn description(&self) -> &str { "(), [] and {} must always pair up" }
@@ -143,30 +163,35 @@ impl ProjectError for MisalignedParen {
   fn message(&self) -> String { format!("This {} has no pair", self.0) }
 }
 
+/// Export line contains a complex name
 pub struct NamespacedExport(pub Location);
 impl ProjectError for NamespacedExport {
   fn description(&self) -> &str { "Only local names may be exported" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// Export line contains *
 pub struct GlobExport(pub Location);
 impl ProjectError for GlobExport {
   fn description(&self) -> &str { "Globstars are not allowed in exports" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// String literal never ends
 pub struct NoStringEnd(pub Location);
 impl ProjectError for NoStringEnd {
   fn description(&self) -> &str { "A string literal was not closed with `\"`" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// Comment never ends
 pub struct NoCommentEnd(pub Location);
 impl ProjectError for NoCommentEnd {
   fn description(&self) -> &str { "a comment was not closed with `]--`" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// A placeholder's priority is a floating point number
 pub struct FloatPlacehPrio(pub Location);
 impl ProjectError for FloatPlacehPrio {
   fn description(&self) -> &str {
@@ -175,12 +200,14 @@ impl ProjectError for FloatPlacehPrio {
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// A number literal decodes to NaN
 pub struct NaNLiteral(pub Location);
 impl ProjectError for NaNLiteral {
   fn description(&self) -> &str { "float literal decoded to NaN" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// A sequence of digits in a number literal overflows [usize].
 pub struct LiteralOverflow(pub Location);
 impl ProjectError for LiteralOverflow {
   fn description(&self) -> &str {
@@ -189,18 +216,21 @@ impl ProjectError for LiteralOverflow {
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// A digit was expected but something else was found
 pub struct ExpectedDigit(pub Location);
 impl ProjectError for ExpectedDigit {
   fn description(&self) -> &str { "expected a digit" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// A unicode escape sequence contains something other than a hex digit
 pub struct NotHex(pub Location);
 impl ProjectError for NotHex {
   fn description(&self) -> &str { "Expected a hex digit" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// A unicode escape sequence contains a number that isn't a unicode code point.
 pub struct BadCodePoint(pub Location);
 impl ProjectError for BadCodePoint {
   fn description(&self) -> &str {
@@ -209,8 +239,36 @@ impl ProjectError for BadCodePoint {
   fn one_position(&self) -> Location { self.0.clone() }
 }
 
+/// An unrecognized escape sequence occurred in a string.
 pub struct BadEscapeSequence(pub Location);
 impl ProjectError for BadEscapeSequence {
   fn description(&self) -> &str { "Unrecognized escape sequence" }
   fn one_position(&self) -> Location { self.0.clone() }
 }
+
+/// Expected a parenthesized block at the end of the line
+pub struct ExpectedBlock(pub Location);
+impl ExpectedBlock {
+  /// Remove two parentheses from the ends of the cursor
+  pub fn expect(tail: Stream, typ: PType) -> ProjectResult<Stream> {
+    let (lp, tail) = tail.trim().pop()?;
+    Expected::expect(Lexeme::LP(typ), lp)?;
+    let (rp, tail) = tail.pop_back()?;
+    Expected::expect(Lexeme::RP(typ), rp)?;
+    Ok(tail.trim())
+  }
+}
+impl ProjectError for ExpectedBlock {
+  fn description(&self) -> &str { "Expected a parenthesized block" }
+  fn one_position(&self) -> Location { self.0.clone() }
+}
+
+/// A namespaced name was expected but a glob pattern or a branching multiname
+/// was found.
+pub struct ExpectedSingleName(pub Location);
+impl ProjectError for ExpectedSingleName {
+  fn one_position(&self) -> Location { self.0.clone() }
+  fn description(&self) -> &str {
+    "expected a single name, no wildcards, no branches"
+  }
+}
@@ -1,22 +1,46 @@
-use super::context::Context;
+use super::context::{Context, FlatLocContext};
 use super::lexer::lex;
-use super::sourcefile::parse_module_body;
+use super::sourcefile::{parse_exprv, parse_module_body, vec_to_single};
 use super::stream::Stream;
-use crate::error::{ParseErrorWithTokens, ProjectError, ProjectResult};
+use super::{parse_line, split_lines};
+use crate::ast::Expr;
+use crate::error::ProjectResult;
 use crate::representations::sourcefile::FileEntry;
+use crate::{Location, VName};
 
-pub fn parse2(ctx: impl Context) -> ProjectResult<Vec<FileEntry>> {
-  let tokens = lex(vec![], ctx.source().as_str(), &ctx).expect("debug");
+/// Parse a file
+pub fn parse_file(ctx: impl Context) -> ProjectResult<Vec<FileEntry>> {
+  let tokens = lex(vec![], ctx.source().as_str(), &ctx)?;
   if tokens.is_empty() {
     Ok(Vec::new())
   } else {
-    parse_module_body(Stream::from_slice(&tokens), &ctx).map_err(|error| {
-      ParseErrorWithTokens {
-        error,
-        full_source: ctx.source().to_string(),
-        tokens,
-      }
-      .rc()
-    })
+    parse_module_body(Stream::from_slice(&tokens), &ctx)
   }
 }
+
+/// Parse a ready-made expression
+pub fn parse_expr(
+  ctx: &impl Context,
+  text: &'static str,
+  location: Location,
+) -> ProjectResult<Expr<VName>> {
+  let ctx = FlatLocContext::new(ctx, &location);
+  let tokens = lex(vec![], text, &ctx)?;
+  let items = parse_exprv(Stream::from_slice(&tokens), None, &ctx)?.0;
+  vec_to_single(tokens.first().expect("source must not be empty"), items)
+}
+
+/// Parse a ready-made line
+pub fn parse_entries(
+  ctx: &(impl Context + ?Sized),
+  text: &'static str,
+  location: Location,
+) -> ProjectResult<Vec<FileEntry>> {
+  let ctx = FlatLocContext::new(ctx, &location);
+  let tokens = lex(vec![], text, &ctx)?;
+  let entries = split_lines(Stream::from_slice(&tokens))
+    .flat_map(|tokens| parse_line(tokens, &ctx).expect("pre-specified source"))
+    .map(|kind| kind.wrap(location.clone()))
+    .collect();
+  Ok(entries)
+}
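A hypothetical caller of the new helper, written as if it lived alongside parse_entries in the same module so the names above are in scope; the function name and the hard-coded line are assumptions, not part of the commit.

    // Hypothetical embedding: parse one prepared line with a flat location,
    // e.g. to splice system-provided constants into a module tree at startup.
    fn embedded_prelude(ctx: &impl Context) -> ProjectResult<Vec<FileEntry>> {
      parse_entries(ctx, "const greeting := \"Hello\"", Location::Unknown)
    }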
@@ -9,7 +9,7 @@ use super::context::Context;
 use super::errors::{FloatPlacehPrio, NoCommentEnd};
 use super::numeric::{numstart, parse_num, print_nat16};
 use super::LexerPlugin;
-use crate::ast::{PHClass, Placeholder};
+use crate::ast::{PHClass, PType, Placeholder};
 use crate::error::{ProjectError, ProjectResult};
 use crate::foreign::Atom;
 use crate::interner::Tok;
@@ -20,9 +20,12 @@ use crate::utils::pure_seq::next;
 use crate::utils::unwrap_or;
 use crate::{Location, VName};
 
+/// A lexeme and the location where it was found
 #[derive(Clone, Debug)]
 pub struct Entry {
+  /// the lexeme
   pub lexeme: Lexeme,
+  /// the location. Always a range
   pub location: Location,
 }
 impl Entry {
@@ -32,27 +35,17 @@ impl Entry {
     matches!(self.lexeme, Lexeme::Comment(_) | Lexeme::BR)
   }
 
-  #[must_use]
-  pub fn is_keyword(&self) -> bool {
-    false
-    // matches!(
-    //   self.lexeme,
-    //   Lexeme::Const
-    //     | Lexeme::Export
-    //     | Lexeme::Import
-    //     | Lexeme::Macro
-    //     | Lexeme::Module
-    // )
-  }
-
+  /// Get location
   #[must_use]
   pub fn location(&self) -> Location { self.location.clone() }
 
+  /// Get range from location
   #[must_use]
   pub fn range(&self) -> Range<usize> {
     self.location.range().expect("An Entry can only have a known location")
   }
 
+  /// Get file path from location
   #[must_use]
   pub fn file(&self) -> Arc<VName> {
     self.location.file().expect("An Entry can only have a range location")
@@ -73,32 +66,34 @@ impl AsRef<Location> for Entry {
|
|||||||
fn as_ref(&self) -> &Location { &self.location }
|
fn as_ref(&self) -> &Location { &self.location }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A unit of syntax
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub enum Lexeme {
|
pub enum Lexeme {
|
||||||
|
/// Atoms parsed by plugins
|
||||||
Atom(Atom),
|
Atom(Atom),
|
||||||
|
/// Keyword or name
|
||||||
Name(Tok<String>),
|
Name(Tok<String>),
|
||||||
|
/// Macro operator `=`number`=>`
|
||||||
Arrow(NotNan<f64>),
|
Arrow(NotNan<f64>),
|
||||||
/// Walrus operator (formerly shorthand macro)
|
/// `:=`
|
||||||
Walrus,
|
Walrus,
|
||||||
/// Line break
|
/// Line break
|
||||||
BR,
|
BR,
|
||||||
/// Namespace separator
|
/// `::`
|
||||||
NS,
|
NS,
|
||||||
/// Left paren
|
/// Left paren `([{`
|
||||||
LP(char),
|
LP(PType),
|
||||||
/// Right paren
|
/// Right paren `)]}`
|
||||||
RP(char),
|
RP(PType),
|
||||||
/// Backslash
|
/// `\`
|
||||||
BS,
|
BS,
|
||||||
|
/// `@`
|
||||||
At,
|
At,
|
||||||
// Dot,
|
/// `:`
|
||||||
Type, // type operator
|
Type,
|
||||||
|
/// comment
|
||||||
Comment(Arc<String>),
|
Comment(Arc<String>),
|
||||||
// Export,
|
/// placeholder in a macro.
|
||||||
// Import,
|
|
||||||
// Module,
|
|
||||||
// Macro,
|
|
||||||
// Const,
|
|
||||||
Placeh(Placeholder),
|
Placeh(Placeholder),
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -110,53 +105,26 @@ impl Display for Lexeme {
|
|||||||
Self::Walrus => write!(f, ":="),
|
Self::Walrus => write!(f, ":="),
|
||||||
Self::Arrow(prio) => write!(f, "={}=>", print_nat16(*prio)),
|
Self::Arrow(prio) => write!(f, "={}=>", print_nat16(*prio)),
|
||||||
Self::NS => write!(f, "::"),
|
Self::NS => write!(f, "::"),
|
||||||
Self::LP(l) => write!(f, "{}", l),
|
Self::LP(t) => write!(f, "{}", t.l()),
|
||||||
Self::RP(l) => match l {
|
Self::RP(t) => write!(f, "{}", t.r()),
|
||||||
'(' => write!(f, ")"),
|
|
||||||
'[' => write!(f, "]"),
|
|
||||||
'{' => write!(f, "}}"),
|
|
||||||
_ => f.debug_tuple("RP").field(l).finish(),
|
|
||||||
},
|
|
||||||
Self::BR => writeln!(f),
|
Self::BR => writeln!(f),
|
||||||
Self::BS => write!(f, "\\"),
|
Self::BS => write!(f, "\\"),
|
||||||
Self::At => write!(f, "@"),
|
Self::At => write!(f, "@"),
|
||||||
Self::Type => write!(f, ":"),
|
Self::Type => write!(f, ":"),
|
||||||
Self::Comment(text) => write!(f, "--[{}]--", text),
|
Self::Comment(text) => write!(f, "--[{}]--", text),
|
||||||
// Self::Export => write!(f, "export"),
|
Self::Placeh(ph) => write!(f, "{ph}"),
|
||||||
// Self::Import => write!(f, "import"),
|
|
||||||
// Self::Module => write!(f, "module"),
|
|
||||||
// Self::Const => write!(f, "const"),
|
|
||||||
// Self::Macro => write!(f, "macro"),
|
|
||||||
Self::Placeh(Placeholder { name, class }) => match *class {
|
|
||||||
PHClass::Scalar => write!(f, "${}", **name),
|
|
||||||
PHClass::Vec { nonzero, prio } => {
|
|
||||||
if nonzero { write!(f, "...") } else { write!(f, "..") }?;
|
|
||||||
write!(f, "${}", **name)?;
|
|
||||||
if prio != 0 {
|
|
||||||
write!(f, ":{}", prio)?;
|
|
||||||
};
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Lexeme {
|
impl Lexeme {
|
||||||
#[must_use]
|
/// Compare lexemes for equality. It's `strict` because for atoms it uses the
|
||||||
pub fn rule(prio: impl Into<f64>) -> Self {
|
/// strict equality comparison
|
||||||
Lexeme::Arrow(
|
|
||||||
NotNan::new(prio.into()).expect("Rule priority cannot be NaN"),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn strict_eq(&self, other: &Self) -> bool {
|
pub fn strict_eq(&self, other: &Self) -> bool {
|
||||||
match (self, other) {
|
match (self, other) {
|
||||||
(Self::Arrow(f1), Self::Arrow(f2)) => f1 == f2,
|
(Self::Arrow(f1), Self::Arrow(f2)) => f1 == f2,
|
||||||
(Self::At, Self::At) | (Self::BR, Self::BR) => true,
|
(Self::At, Self::At) | (Self::BR, Self::BR) => true,
|
||||||
(Self::BS, Self::BS) /*| (Self::Const, Self::Const)*/ => true,
|
(Self::BS, Self::BS) => true,
|
||||||
// (Self::Export, Self::Export) | (Self::Import, Self::Import) => true,
|
|
||||||
// (Self::Macro, Self::Macro) | (Self::Module, Self::Module) => true,
|
|
||||||
(Self::NS, Self::NS) | (Self::Type, Self::Type) => true,
|
(Self::NS, Self::NS) | (Self::Type, Self::Type) => true,
|
||||||
(Self::Walrus, Self::Walrus) => true,
|
(Self::Walrus, Self::Walrus) => true,
|
||||||
(Self::Atom(a1), Self::Atom(a2)) => a1.0.strict_eq(&a2.0),
|
(Self::Atom(a1), Self::Atom(a2)) => a1.0.strict_eq(&a2.0),
|
||||||
@@ -164,20 +132,25 @@ impl Lexeme {
|
|||||||
(Self::LP(p1), Self::LP(p2)) | (Self::RP(p1), Self::RP(p2)) => p1 == p2,
|
(Self::LP(p1), Self::LP(p2)) | (Self::RP(p1), Self::RP(p2)) => p1 == p2,
|
||||||
(Self::Name(n1), Self::Name(n2)) => n1 == n2,
|
(Self::Name(n1), Self::Name(n2)) => n1 == n2,
|
||||||
(Self::Placeh(ph1), Self::Placeh(ph2)) => ph1 == ph2,
|
(Self::Placeh(ph1), Self::Placeh(ph2)) => ph1 == ph2,
|
||||||
(_, _) => false,
|
(..) => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Neatly format source code
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
pub fn format(lexed: &[Entry]) -> String { lexed.iter().join(" ") }
|
pub fn format(lexed: &[Entry]) -> String { lexed.iter().join(" ") }
|
||||||
|
|
||||||
|
/// Character filter that can appear in a keyword or name
|
||||||
pub fn namechar(c: char) -> bool { c.is_alphanumeric() | (c == '_') }
|
pub fn namechar(c: char) -> bool { c.is_alphanumeric() | (c == '_') }
|
||||||
|
/// Character filter that can start a name
|
||||||
pub fn namestart(c: char) -> bool { c.is_alphabetic() | (c == '_') }
|
pub fn namestart(c: char) -> bool { c.is_alphabetic() | (c == '_') }
|
||||||
|
/// Character filter that can appear in operators.
|
||||||
pub fn opchar(c: char) -> bool {
|
pub fn opchar(c: char) -> bool {
|
||||||
!namestart(c) && !numstart(c) && !c.is_whitespace() && !"()[]{},".contains(c)
|
!namestart(c) && !numstart(c) && !c.is_whitespace() && !"()[]{},".contains(c)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Split off all characters from the beginning that match a filter
|
||||||
pub fn split_filter(
|
pub fn split_filter(
|
||||||
s: &str,
|
s: &str,
|
||||||
mut pred: impl FnMut(char) -> bool,
|
mut pred: impl FnMut(char) -> bool,
|
||||||
@@ -189,12 +162,12 @@ fn lit_table() -> impl IntoIterator<Item = (&'static str, Lexeme)> {
  [
    ("\\", Lexeme::BS),
    ("@", Lexeme::At),
    ("(", Lexeme::LP('(')),
    ("(", Lexeme::LP(PType::Par)),
    ("[", Lexeme::LP('[')),
    ("[", Lexeme::LP(PType::Sqr)),
    ("{", Lexeme::LP('{')),
    ("{", Lexeme::LP(PType::Curl)),
    (")", Lexeme::RP('(')),
    (")", Lexeme::RP(PType::Par)),
    ("]", Lexeme::RP('[')),
    ("]", Lexeme::RP(PType::Sqr)),
    ("}", Lexeme::RP('{')),
    ("}", Lexeme::RP(PType::Curl)),
    ("\n", Lexeme::BR),
    (":=", Lexeme::Walrus),
    ("::", Lexeme::NS),
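Aside: the literal table drives a straightforward prefix scan — fixed spellings are tried against the head of the input before any other lexer rule, with longer spellings listed first so `:=` beats a bare `:`. A self-contained sketch of that dispatch; the token set and names below are illustrative only, not the crate's.

#[derive(Debug, Clone, PartialEq)]
enum Lexeme { Walrus, NS, LParen, RParen, BS }

fn lit_table() -> Vec<(&'static str, Lexeme)> {
  // Longer spellings first so ":=" wins over any shorter overlapping token.
  vec![
    (":=", Lexeme::Walrus),
    ("::", Lexeme::NS),
    ("(", Lexeme::LParen),
    (")", Lexeme::RParen),
    ("\\", Lexeme::BS),
  ]
}

fn lex(mut src: &str) -> Vec<Lexeme> {
  let table = lit_table();
  let mut out = Vec::new();
  'tail: while !src.is_empty() {
    src = src.trim_start();
    if src.is_empty() { break }
    for (lit, lexeme) in &table {
      if let Some(rest) = src.strip_prefix(*lit) {
        out.push(lexeme.clone());
        src = rest;
        continue 'tail;
      }
    }
    // Anything else is skipped in this sketch; the real lexer has name,
    // number, string and placeholder rules at this point.
    src = &src[src.chars().next().map_or(0, char::len_utf8)..];
  }
  out
}

fn main() {
  assert_eq!(lex(r"foo := \x"), vec![Lexeme::Walrus, Lexeme::BS]);
  println!("{:?}", lex("std::num"));
}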
@@ -282,20 +255,22 @@ pub fn lex(
    }
    // todo: parse placeholders, don't forget vectorials!
    if let Some(tail) = data.strip_prefix('$') {
      let (nameonly, tail) =
        tail.strip_prefix('_').map_or((false, tail), |t| (true, t));
      let (name, tail) = split_filter(tail, namechar);
      if !name.is_empty() {
        let name = ctx.interner().i(name);
        let location = ctx.location(name.len() + 1, tail);
        let lexeme =
        let class = if nameonly { PHClass::Name } else { PHClass::Scalar };
          Lexeme::Placeh(Placeholder { name, class: PHClass::Scalar });
        let lexeme = Lexeme::Placeh(Placeholder { name, class });
        tokens.push(Entry::new(location, lexeme));
        data = tail;
        continue 'tail;
      }
    }
    if let Some(vec) = data.strip_prefix("..") {
    if let Some(tail) = data.strip_prefix("..") {
      let (nonzero, tail) =
        vec.strip_prefix('.').map_or((false, vec), |t| (true, t));
        tail.strip_prefix('.').map_or((false, tail), |t| (true, t));
      if let Some(tail) = tail.strip_prefix('$') {
        let (name, tail) = split_filter(tail, namechar);
        if !name.is_empty() {
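Aside: the two branches above recognise scalar and name placeholders (`$x`, `$_x`) and vectorial ones (`..$x`, `...$x`, optionally followed by `:prio`). A standalone sketch of the same prefix-stripping approach, with simplified error handling and invented types:

#[derive(Debug, PartialEq)]
enum PhClass {
  Scalar,
  Name,
  Vec { nonzero: bool, prio: u64 },
}

#[derive(Debug, PartialEq)]
struct Placeholder { name: String, class: PhClass }

fn namechar(c: char) -> bool { c.is_alphanumeric() || c == '_' }

// Split the longest run of name characters off the front of a string.
fn split_name(s: &str) -> (&str, &str) {
  let len: usize = s.chars().take_while(|c| namechar(*c)).map(char::len_utf8).sum();
  s.split_at(len)
}

// Read one placeholder from the head of `data`, returning it and the rest.
fn lex_placeholder(data: &str) -> Option<(Placeholder, &str)> {
  if let Some(tail) = data.strip_prefix("..") {
    // `...$x` must match at least one token, `..$x` may also match zero.
    let (nonzero, tail) = tail.strip_prefix('.').map_or((false, tail), |t| (true, t));
    let (name, tail) = split_name(tail.strip_prefix('$')?);
    if name.is_empty() { return None }
    // An optional growth priority may follow, written `:123`.
    let (prio, tail) = match tail.strip_prefix(':') {
      None => (0, tail),
      Some(t) => {
        let digits = t.chars().take_while(|c| c.is_ascii_digit()).count();
        (t[..digits].parse().ok()?, &t[digits..])
      },
    };
    let class = PhClass::Vec { nonzero, prio };
    return Some((Placeholder { name: name.to_string(), class }, tail));
  }
  if let Some(tail) = data.strip_prefix('$') {
    // `$_x` matches exactly one name, `$x` any single token.
    let (nameonly, tail) = tail.strip_prefix('_').map_or((false, tail), |t| (true, t));
    let (name, tail) = split_name(tail);
    if name.is_empty() { return None }
    let class = if nameonly { PhClass::Name } else { PhClass::Scalar };
    return Some((Placeholder { name: name.to_string(), class }, tail));
  }
  None
}

fn main() {
  let (ph, rest) = lex_placeholder("...$body:2 tail").unwrap();
  assert_eq!(ph.class, PhClass::Vec { nonzero: true, prio: 2 });
  assert_eq!(rest, " tail");
  assert_eq!(lex_placeholder("$_name").unwrap().0.class, PhClass::Name);
  println!("ok");
}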
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
|
//! Types for interacting with the Orchid parser, and parts of the parser
|
||||||
|
//! plugins can use to match the language's behaviour on certain tasks
|
||||||
mod context;
|
mod context;
|
||||||
mod errors;
|
pub mod errors;
|
||||||
mod facade;
|
mod facade;
|
||||||
mod lexer;
|
mod lexer;
|
||||||
mod multiname;
|
mod multiname;
|
||||||
@@ -8,11 +10,20 @@ mod sourcefile;
|
|||||||
mod stream;
|
mod stream;
|
||||||
mod string;
|
mod string;
|
||||||
|
|
||||||
pub use context::{Context, LexerPlugin, LineParser, ParsingContext};
|
pub use context::{
|
||||||
pub use facade::parse2;
|
Context, LexerPlugin, LexerPluginOut, LineParser, LineParserOut,
|
||||||
|
ParsingContext,
|
||||||
|
};
|
||||||
|
pub use facade::{parse_entries, parse_expr, parse_file};
|
||||||
pub use lexer::{namechar, namestart, opchar, split_filter, Entry, Lexeme};
|
pub use lexer::{namechar, namestart, opchar, split_filter, Entry, Lexeme};
|
||||||
|
pub use multiname::parse_multiname;
|
||||||
pub use numeric::{
|
pub use numeric::{
|
||||||
lex_numeric, numchar, numstart, parse_num, print_nat16, NumError,
|
lex_numeric, numchar, numstart, parse_num, print_nat16, NumError,
|
||||||
NumErrorKind,
|
NumErrorKind,
|
||||||
};
|
};
|
||||||
|
pub use sourcefile::{
|
||||||
|
expr_slice_location, parse_const, parse_exprv, parse_line, parse_module,
|
||||||
|
parse_module_body, parse_rule, split_lines, vec_to_single, parse_nsname
|
||||||
|
};
|
||||||
|
pub use stream::Stream;
|
||||||
pub use string::{lex_string, parse_string, StringError, StringErrorKind};
|
pub use string::{lex_string, parse_string, StringError, StringErrorKind};
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ use super::context::Context;
|
|||||||
use super::errors::Expected;
|
use super::errors::Expected;
|
||||||
use super::stream::Stream;
|
use super::stream::Stream;
|
||||||
use super::Lexeme;
|
use super::Lexeme;
|
||||||
|
use crate::ast::PType;
|
||||||
use crate::error::{ProjectError, ProjectResult};
|
use crate::error::{ProjectError, ProjectResult};
|
||||||
use crate::sourcefile::Import;
|
use crate::sourcefile::Import;
|
||||||
use crate::utils::boxed_iter::{box_chain, box_once};
|
use crate::utils::boxed_iter::{box_chain, box_once};
|
||||||
@@ -43,7 +44,7 @@ impl Subresult {
|
|||||||
|
|
||||||
fn parse_multiname_branch<'a>(
|
fn parse_multiname_branch<'a>(
|
||||||
cursor: Stream<'a>,
|
cursor: Stream<'a>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<(BoxedIter<'a, Subresult>, Stream<'a>)> {
|
) -> ProjectResult<(BoxedIter<'a, Subresult>, Stream<'a>)> {
|
||||||
let comma = ctx.interner().i(",");
|
let comma = ctx.interner().i(",");
|
||||||
let (subnames, cursor) = parse_multiname_rec(cursor, ctx)?;
|
let (subnames, cursor) = parse_multiname_rec(cursor, ctx)?;
|
||||||
@@ -53,10 +54,10 @@ fn parse_multiname_branch<'a>(
|
|||||||
let (tail, cont) = parse_multiname_branch(cursor, ctx)?;
|
let (tail, cont) = parse_multiname_branch(cursor, ctx)?;
|
||||||
Ok((box_chain!(subnames, tail), cont))
|
Ok((box_chain!(subnames, tail), cont))
|
||||||
},
|
},
|
||||||
Lexeme::RP('(') => Ok((subnames, cursor)),
|
Lexeme::RP(PType::Par) => Ok((subnames, cursor)),
|
||||||
_ => Err(
|
_ => Err(
|
||||||
Expected {
|
Expected {
|
||||||
expected: vec![Lexeme::Name(comma), Lexeme::RP('(')],
|
expected: vec![Lexeme::Name(comma), Lexeme::RP(PType::Par)],
|
||||||
or_name: false,
|
or_name: false,
|
||||||
found: delim.clone(),
|
found: delim.clone(),
|
||||||
}
|
}
|
||||||
@@ -67,24 +68,24 @@ fn parse_multiname_branch<'a>(
|
|||||||
|
|
||||||
fn parse_multiname_rec<'a>(
|
fn parse_multiname_rec<'a>(
|
||||||
curosr: Stream<'a>,
|
curosr: Stream<'a>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<(BoxedIter<'a, Subresult>, Stream<'a>)> {
|
) -> ProjectResult<(BoxedIter<'a, Subresult>, Stream<'a>)> {
|
||||||
let star = ctx.interner().i("*");
|
let star = ctx.interner().i("*");
|
||||||
let comma = ctx.interner().i(",");
|
let comma = ctx.interner().i(",");
|
||||||
let (head, mut cursor) = curosr.trim().pop()?;
|
let (head, mut cursor) = curosr.trim().pop()?;
|
||||||
match &head.lexeme {
|
match &head.lexeme {
|
||||||
Lexeme::LP('(') => parse_multiname_branch(cursor, ctx),
|
Lexeme::LP(PType::Par) => parse_multiname_branch(cursor, ctx),
|
||||||
Lexeme::LP('[') => {
|
Lexeme::LP(PType::Sqr) => {
|
||||||
let mut names = Vec::new();
|
let mut names = Vec::new();
|
||||||
loop {
|
loop {
|
||||||
let head;
|
let head;
|
||||||
(head, cursor) = cursor.trim().pop()?;
|
(head, cursor) = cursor.trim().pop()?;
|
||||||
match &head.lexeme {
|
match &head.lexeme {
|
||||||
Lexeme::Name(n) => names.push((n, head.location())),
|
Lexeme::Name(n) => names.push((n, head.location())),
|
||||||
Lexeme::RP('[') => break,
|
Lexeme::RP(PType::Sqr) => break,
|
||||||
_ => {
|
_ => {
|
||||||
let err = Expected {
|
let err = Expected {
|
||||||
expected: vec![Lexeme::RP('[')],
|
expected: vec![Lexeme::RP(PType::Sqr)],
|
||||||
or_name: true,
|
or_name: true,
|
||||||
found: head.clone(),
|
found: head.clone(),
|
||||||
};
|
};
|
||||||
@@ -114,7 +115,7 @@ fn parse_multiname_rec<'a>(
|
|||||||
},
|
},
|
||||||
_ => Err(
|
_ => Err(
|
||||||
Expected {
|
Expected {
|
||||||
expected: vec![Lexeme::LP('(')],
|
expected: vec![Lexeme::LP(PType::Par)],
|
||||||
or_name: true,
|
or_name: true,
|
||||||
found: head.clone(),
|
found: head.clone(),
|
||||||
}
|
}
|
||||||
@@ -123,9 +124,25 @@ fn parse_multiname_rec<'a>(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Parse a tree that describes several names. The tree can be
///
/// - name (except `,` or `*`)
/// - name (except `,` or `*`) `::` tree
/// - `(` tree `,` tree ... `)`
/// - `*` (wildcard)
/// - `[` name name ... `]` (including `,` or `*`).
///
/// Examples of valid syntax:
///
/// ```txt
/// foo
/// foo::bar::baz
/// foo::bar::(baz, quz::quux, fimble::*)
/// foo::bar::[baz quz * +]
/// ```
pub fn parse_multiname<'a>(
  cursor: Stream<'a>,
  ctx: &impl Context,
  ctx: &(impl Context + ?Sized),
) -> ProjectResult<(Vec<Import>, Stream<'a>)> {
  let (output, cont) = parse_multiname_rec(cursor, ctx)?;
  Ok((output.map(|sr| sr.finalize()).collect(), cont))
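Aside: to make the grammar above concrete, this standalone sketch expands an already-parsed import tree into one flat path per leaf, which is what `parse_multiname` ultimately produces for something like `foo::bar::(baz, quz::quux)`. The tree type is invented for the example.

// A pre-parsed import tree: either a leaf name or `prefix::(branch, branch, ...)`.
enum NameTree {
  Leaf(String),
  Scope(String, Vec<NameTree>),
}

// Flatten the tree into one fully qualified path per leaf.
fn expand(tree: &NameTree, prefix: &[String], out: &mut Vec<Vec<String>>) {
  match tree {
    NameTree::Leaf(name) => {
      let mut path = prefix.to_vec();
      path.push(name.clone());
      out.push(path);
    },
    NameTree::Scope(name, branches) => {
      let mut path = prefix.to_vec();
      path.push(name.clone());
      for branch in branches {
        expand(branch, &path, out);
      }
    },
  }
}

fn main() {
  // foo::bar::(baz, quz::quux)
  let tree = NameTree::Scope("foo".into(), vec![NameTree::Scope("bar".into(), vec![
    NameTree::Leaf("baz".into()),
    NameTree::Scope("quz".into(), vec![NameTree::Leaf("quux".into())]),
  ])]);
  let mut out = Vec::new();
  expand(&tree, &[], &mut out);
  for path in out { println!("{}", path.join("::")); }
  // Prints foo::bar::baz and foo::bar::quz::quux
}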
|
|||||||
@@ -5,16 +5,22 @@ use std::rc::Rc;
|
|||||||
use ordered_float::NotNan;
|
use ordered_float::NotNan;
|
||||||
|
|
||||||
use super::context::Context;
|
use super::context::Context;
|
||||||
use super::errors::NaNLiteral;
|
#[allow(unused)] // for doc
|
||||||
|
use super::context::LexerPlugin;
|
||||||
|
use super::errors::{ExpectedDigit, LiteralOverflow, NaNLiteral};
|
||||||
use super::lexer::split_filter;
|
use super::lexer::split_filter;
|
||||||
use crate::error::{ProjectError, ProjectResult};
|
use crate::error::{ProjectError, ProjectResult};
|
||||||
use crate::foreign::Atom;
|
use crate::foreign::Atom;
|
||||||
use crate::systems::stl::Numeric;
|
use crate::systems::stl::Numeric;
|
||||||
|
|
||||||
|
/// Reasons why [parse_num] might fail. See [NumError].
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub enum NumErrorKind {
|
pub enum NumErrorKind {
|
||||||
|
/// The literal describes [f64::NAN]
|
||||||
NaN,
|
NaN,
|
||||||
|
/// Some integer appearing in the literal overflows [usize]
|
||||||
Overflow,
|
Overflow,
|
||||||
|
/// A character that isn't a digit in the given base was found
|
||||||
InvalidDigit,
|
InvalidDigit,
|
||||||
}
|
}
|
||||||
impl NumErrorKind {
|
impl NumErrorKind {
|
||||||
@@ -27,13 +33,17 @@ impl NumErrorKind {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Error produced by [parse_num]
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub struct NumError {
|
pub struct NumError {
|
||||||
|
/// Location
|
||||||
pub range: Range<usize>,
|
pub range: Range<usize>,
|
||||||
|
/// Reason
|
||||||
pub kind: NumErrorKind,
|
pub kind: NumErrorKind,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl NumError {
|
impl NumError {
|
||||||
|
/// Convert into [ProjectError] trait object
|
||||||
pub fn into_proj(
|
pub fn into_proj(
|
||||||
self,
|
self,
|
||||||
len: usize,
|
len: usize,
|
||||||
@@ -44,12 +54,13 @@ impl NumError {
|
|||||||
let location = ctx.range_loc(start..start + self.range.len());
|
let location = ctx.range_loc(start..start + self.range.len());
|
||||||
match self.kind {
|
match self.kind {
|
||||||
NumErrorKind::NaN => NaNLiteral(location).rc(),
|
NumErrorKind::NaN => NaNLiteral(location).rc(),
|
||||||
_ => panic!(),
|
NumErrorKind::InvalidDigit => ExpectedDigit(location).rc(),
|
||||||
// NumErrorKind::Int(iek) => IntError(location, iek).rc(),
|
NumErrorKind::Overflow => LiteralOverflow(location).rc(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Parse a number literal out of text
|
||||||
pub fn parse_num(string: &str) -> Result<Numeric, NumError> {
|
pub fn parse_num(string: &str) -> Result<Numeric, NumError> {
|
||||||
let overflow_err =
|
let overflow_err =
|
||||||
NumError { range: 0..string.len(), kind: NumErrorKind::Overflow };
|
NumError { range: 0..string.len(), kind: NumErrorKind::Overflow };
|
||||||
@@ -96,9 +107,12 @@ fn int_parse(s: &str, radix: u8, start: usize) -> Result<usize, NumError> {
|
|||||||
.map_err(|e| NumError { range, kind: NumErrorKind::from_int(e.kind()) })
|
.map_err(|e| NumError { range, kind: NumErrorKind::from_int(e.kind()) })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Filter for characters that can appear in numbers
|
||||||
pub fn numchar(c: char) -> bool { c.is_alphanumeric() | "._-".contains(c) }
|
pub fn numchar(c: char) -> bool { c.is_alphanumeric() | "._-".contains(c) }
|
||||||
|
/// Filter for characters that can start numbers
|
||||||
pub fn numstart(c: char) -> bool { c.is_ascii_digit() }
|
pub fn numstart(c: char) -> bool { c.is_ascii_digit() }
|
||||||
|
|
||||||
|
/// [LexerPlugin] for a number literal
|
||||||
pub fn lex_numeric<'a>(
|
pub fn lex_numeric<'a>(
|
||||||
data: &'a str,
|
data: &'a str,
|
||||||
ctx: &dyn Context,
|
ctx: &dyn Context,
|
||||||
@@ -140,6 +154,7 @@ mod test {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Print a number as a base-16 floating point literal
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn print_nat16(num: NotNan<f64>) -> String {
|
pub fn print_nat16(num: NotNan<f64>) -> String {
|
||||||
if *num == 0.0 {
|
if *num == 0.0 {
|
||||||
|
|||||||
@@ -5,20 +5,23 @@ use itertools::Itertools;
|
|||||||
|
|
||||||
use super::context::Context;
|
use super::context::Context;
|
||||||
use super::errors::{
|
use super::errors::{
|
||||||
BadTokenInRegion, Expected, ExpectedName, GlobExport, LeadingNS,
|
BadTokenInRegion, Expected, ExpectedBlock, ExpectedName, ExpectedSingleName,
|
||||||
MisalignedParen, NamespacedExport, ReservedToken, UnexpectedEOL,
|
GlobExport, LeadingNS, MisalignedParen, NamespacedExport, ReservedToken,
|
||||||
|
UnexpectedEOL,
|
||||||
};
|
};
|
||||||
use super::lexer::Lexeme;
|
use super::lexer::Lexeme;
|
||||||
use super::multiname::parse_multiname;
|
use super::multiname::parse_multiname;
|
||||||
use super::stream::Stream;
|
use super::stream::Stream;
|
||||||
use super::Entry;
|
use super::Entry;
|
||||||
use crate::ast::{Clause, Constant, Expr, Rule};
|
use crate::ast::{Clause, Constant, Expr, PType, Rule};
|
||||||
use crate::error::{ProjectError, ProjectResult};
|
use crate::error::{ProjectError, ProjectResult};
|
||||||
use crate::representations::location::Location;
|
use crate::representations::location::Location;
|
||||||
use crate::representations::sourcefile::{FileEntry, MemberKind, ModuleBlock};
|
use crate::representations::sourcefile::{FileEntry, MemberKind, ModuleBlock};
|
||||||
use crate::representations::VName;
|
use crate::representations::VName;
|
||||||
use crate::sourcefile::{FileEntryKind, Import, Member};
|
use crate::sourcefile::{FileEntryKind, Import, Member};
|
||||||
|
use crate::utils::pure_seq::pushed;
|
||||||
|
|
||||||
|
/// Split the stream at each line break outside parentheses
|
||||||
pub fn split_lines(module: Stream<'_>) -> impl Iterator<Item = Stream<'_>> {
|
pub fn split_lines(module: Stream<'_>) -> impl Iterator<Item = Stream<'_>> {
|
||||||
let mut source = module.data.iter().enumerate();
|
let mut source = module.data.iter().enumerate();
|
||||||
let mut fallback = module.fallback;
|
let mut fallback = module.fallback;
|
||||||
@@ -47,15 +50,27 @@ pub fn split_lines(module: Stream<'_>) -> impl Iterator<Item = Stream<'_>> {
|
|||||||
}
|
}
|
||||||
None
|
None
|
||||||
})
|
})
|
||||||
|
    .map(Stream::trim)
    .map(|s| {
      s.pop()
        .and_then(|(first, inner)| {
          let (last, inner) = inner.pop_back()?;
          match (&first.lexeme, &last.lexeme) {
            (Lexeme::LP(PType::Par), Lexeme::RP(PType::Par)) => Ok(inner.trim()),
            _ => Ok(s),
          }
        })
        .unwrap_or(s)
    })
    .filter(|l| !l.data.is_empty())
}
||||||
|
|
||||||
|
/// Parse linebreak-separated entries
|
||||||
pub fn parse_module_body(
|
pub fn parse_module_body(
|
||||||
cursor: Stream<'_>,
|
cursor: Stream<'_>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<Vec<FileEntry>> {
|
) -> ProjectResult<Vec<FileEntry>> {
|
||||||
split_lines(cursor)
|
split_lines(cursor)
|
||||||
.map(Stream::trim)
|
|
||||||
.filter(|l| !l.data.is_empty())
|
|
||||||
.map(|l| {
|
.map(|l| {
|
||||||
parse_line(l, ctx).map(move |kinds| {
|
parse_line(l, ctx).map(move |kinds| {
|
||||||
kinds
|
kinds
|
||||||
@@ -67,12 +82,13 @@ pub fn parse_module_body(
|
|||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Parse a single, possibly exported entry
|
||||||
pub fn parse_line(
|
pub fn parse_line(
|
||||||
cursor: Stream<'_>,
|
cursor: Stream<'_>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<Vec<FileEntryKind>> {
|
) -> ProjectResult<Vec<FileEntryKind>> {
|
||||||
for line_parser in ctx.line_parsers() {
|
for line_parser in ctx.line_parsers() {
|
||||||
if let Some(result) = line_parser(cursor, ctx) {
|
if let Some(result) = line_parser(cursor, &ctx) {
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -100,9 +116,9 @@ pub fn parse_line(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_export_line(
|
fn parse_export_line(
|
||||||
cursor: Stream<'_>,
|
cursor: Stream<'_>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<FileEntryKind> {
|
) -> ProjectResult<FileEntryKind> {
|
||||||
let cursor = cursor.trim();
|
let cursor = cursor.trim();
|
||||||
match &cursor.get(0)?.lexeme {
|
match &cursor.get(0)?.lexeme {
|
||||||
@@ -135,7 +151,7 @@ pub fn parse_export_line(
|
|||||||
|
|
||||||
fn parse_member(
|
fn parse_member(
|
||||||
cursor: Stream<'_>,
|
cursor: Stream<'_>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<MemberKind> {
|
) -> ProjectResult<MemberKind> {
|
||||||
let (typemark, cursor) = cursor.trim().pop()?;
|
let (typemark, cursor) = cursor.trim().pop()?;
|
||||||
match &typemark.lexeme {
|
match &typemark.lexeme {
|
||||||
@@ -144,7 +160,7 @@ fn parse_member(
|
|||||||
Ok(MemberKind::Constant(constant))
|
Ok(MemberKind::Constant(constant))
|
||||||
},
|
},
|
||||||
Lexeme::Name(n) if **n == "macro" => {
|
Lexeme::Name(n) if **n == "macro" => {
|
||||||
let rule = parse_rule(cursor, ctx)?;
|
let rule = parse_rule(cursor, &ctx)?;
|
||||||
Ok(MemberKind::Rule(rule))
|
Ok(MemberKind::Rule(rule))
|
||||||
},
|
},
|
||||||
Lexeme::Name(n) if **n == "module" => {
|
Lexeme::Name(n) if **n == "module" => {
|
||||||
@@ -159,7 +175,8 @@ fn parse_member(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_rule(
|
/// Parse a macro rule
|
||||||
|
pub fn parse_rule(
|
||||||
cursor: Stream<'_>,
|
cursor: Stream<'_>,
|
||||||
ctx: &impl Context,
|
ctx: &impl Context,
|
||||||
) -> ProjectResult<Rule<VName>> {
|
) -> ProjectResult<Rule<VName>> {
|
||||||
@@ -172,9 +189,10 @@ fn parse_rule(
|
|||||||
Ok(Rule { pattern, prio, template })
|
Ok(Rule { pattern, prio, template })
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_const(
|
/// Parse a constant declaration
|
||||||
|
pub fn parse_const(
|
||||||
cursor: Stream<'_>,
|
cursor: Stream<'_>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<Constant> {
|
) -> ProjectResult<Constant> {
|
||||||
let (name_ent, cursor) = cursor.trim().pop()?;
|
let (name_ent, cursor) = cursor.trim().pop()?;
|
||||||
let name = ExpectedName::expect(name_ent)?;
|
let name = ExpectedName::expect(name_ent)?;
|
||||||
@@ -184,24 +202,38 @@ fn parse_const(
|
|||||||
Ok(Constant { name, value: vec_to_single(walrus_ent, body)? })
|
Ok(Constant { name, value: vec_to_single(walrus_ent, body)? })
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_module(
|
/// Parse a namespaced name. TODO: use this for modules
|
||||||
|
pub fn parse_nsname<'a>(
|
||||||
|
cursor: Stream<'a>,
|
||||||
|
ctx: &(impl Context + ?Sized),
|
||||||
|
) -> ProjectResult<(VName, Stream<'a>)> {
|
||||||
|
let (name, tail) = parse_multiname(cursor, ctx)?;
|
||||||
|
let name = match name.into_iter().exactly_one() {
|
||||||
|
Ok(Import { name: Some(name), path, .. }) => pushed(path, name),
|
||||||
|
_ => {
|
||||||
|
let loc = cursor.data[0].location().to(tail.data[0].location());
|
||||||
|
return Err(ExpectedSingleName(loc).rc());
|
||||||
|
},
|
||||||
|
};
|
||||||
|
Ok((name, tail))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a submodule declaration
|
||||||
|
pub fn parse_module(
|
||||||
cursor: Stream<'_>,
|
cursor: Stream<'_>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<ModuleBlock> {
|
) -> ProjectResult<ModuleBlock> {
|
||||||
let (name_ent, cursor) = cursor.trim().pop()?;
|
let (name_ent, cursor) = cursor.trim().pop()?;
|
||||||
let name = ExpectedName::expect(name_ent)?;
|
let name = ExpectedName::expect(name_ent)?;
|
||||||
let (lp_ent, cursor) = cursor.trim().pop()?;
|
let body = ExpectedBlock::expect(cursor, PType::Par)?;
|
||||||
Expected::expect(Lexeme::LP('('), lp_ent)?;
|
Ok(ModuleBlock { name, body: parse_module_body(body, ctx)? })
|
||||||
let (last, cursor) = cursor.pop_back()?;
|
|
||||||
Expected::expect(Lexeme::RP('('), last)?;
|
|
||||||
let body = parse_module_body(cursor, ctx)?;
|
|
||||||
Ok(ModuleBlock { name, body })
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_exprv<'a>(
|
/// Parse a sequence of expressions
|
||||||
|
pub fn parse_exprv<'a>(
|
||||||
mut cursor: Stream<'a>,
|
mut cursor: Stream<'a>,
|
||||||
paren: Option<char>,
|
paren: Option<PType>,
|
||||||
ctx: &impl Context,
|
ctx: &(impl Context + ?Sized),
|
||||||
) -> ProjectResult<(Vec<Expr<VName>>, Stream<'a>)> {
|
) -> ProjectResult<(Vec<Expr<VName>>, Stream<'a>)> {
|
||||||
let mut output = Vec::new();
|
let mut output = Vec::new();
|
||||||
cursor = cursor.trim();
|
cursor = cursor.trim();
|
||||||
@@ -272,7 +304,8 @@ fn parse_exprv<'a>(
|
|||||||
Ok((output, Stream::new(cursor.fallback, &[])))
|
Ok((output, Stream::new(cursor.fallback, &[])))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn vec_to_single(
|
/// Wrap an expression list in parentheses if necessary
|
||||||
|
pub fn vec_to_single(
|
||||||
fallback: &Entry,
|
fallback: &Entry,
|
||||||
v: Vec<Expr<VName>>,
|
v: Vec<Expr<VName>>,
|
||||||
) -> ProjectResult<Expr<VName>> {
|
) -> ProjectResult<Expr<VName>> {
|
||||||
@@ -281,11 +314,12 @@ fn vec_to_single(
|
|||||||
1 => Ok(v.into_iter().exactly_one().unwrap()),
|
1 => Ok(v.into_iter().exactly_one().unwrap()),
|
||||||
_ => Ok(Expr {
|
_ => Ok(Expr {
|
||||||
location: expr_slice_location(&v),
|
location: expr_slice_location(&v),
|
||||||
value: Clause::S('(', Rc::new(v)),
|
value: Clause::S(PType::Par, Rc::new(v)),
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Return the location of a sequence of consecutive expressions
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn expr_slice_location(v: &[impl AsRef<Location>]) -> Location {
|
pub fn expr_slice_location(v: &[impl AsRef<Location>]) -> Location {
|
||||||
v.first()
|
v.first()
|
||||||
|
|||||||
@@ -8,14 +8,18 @@ use crate::Location;
|
|||||||
#[must_use = "streams represent segments of code that must be parsed"]
|
#[must_use = "streams represent segments of code that must be parsed"]
|
||||||
#[derive(Clone, Copy)]
|
#[derive(Clone, Copy)]
|
||||||
pub struct Stream<'a> {
|
pub struct Stream<'a> {
|
||||||
|
/// Entry to place in errors if the stream contains no tokens
|
||||||
pub fallback: &'a Entry,
|
pub fallback: &'a Entry,
|
||||||
|
/// Tokens to parse
|
||||||
pub data: &'a [Entry],
|
pub data: &'a [Entry],
|
||||||
}
|
}
|
||||||
impl<'a> Stream<'a> {
|
impl<'a> Stream<'a> {
|
||||||
|
/// Create a new stream
|
||||||
pub fn new(fallback: &'a Entry, data: &'a [Entry]) -> Self {
|
pub fn new(fallback: &'a Entry, data: &'a [Entry]) -> Self {
|
||||||
Self { fallback, data }
|
Self { fallback, data }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Remove comments and line breaks from both ends of the text
|
||||||
pub fn trim(self) -> Self {
|
pub fn trim(self) -> Self {
|
||||||
let Self { data, fallback } = self;
|
let Self { data, fallback } = self;
|
||||||
let front = data.iter().take_while(|e| e.is_filler()).count();
|
let front = data.iter().take_while(|e| e.is_filler()).count();
|
||||||
@@ -25,12 +29,14 @@ impl<'a> Stream<'a> {
|
|||||||
Self { fallback, data }
|
Self { fallback, data }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Discard the first entry
|
||||||
pub fn step(self) -> ProjectResult<Self> {
|
pub fn step(self) -> ProjectResult<Self> {
|
||||||
let (fallback, data) = (self.data.split_first())
|
let (fallback, data) = (self.data.split_first())
|
||||||
.ok_or_else(|| UnexpectedEOL { entry: self.fallback.clone() }.rc())?;
|
.ok_or_else(|| UnexpectedEOL { entry: self.fallback.clone() }.rc())?;
|
||||||
Ok(Stream { data, fallback })
|
Ok(Stream { data, fallback })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the first entry
|
||||||
pub fn pop(self) -> ProjectResult<(&'a Entry, Stream<'a>)> {
|
pub fn pop(self) -> ProjectResult<(&'a Entry, Stream<'a>)> {
|
||||||
Ok((self.get(0)?, self.step()?))
|
Ok((self.get(0)?, self.step()?))
|
||||||
}
|
}
|
||||||
@@ -43,6 +49,7 @@ impl<'a> Stream<'a> {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Area covered by this stream
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn location(self) -> Location {
|
pub fn location(self) -> Location {
|
||||||
self.data.first().map_or_else(
|
self.data.first().map_or_else(
|
||||||
@@ -51,6 +58,8 @@ impl<'a> Stream<'a> {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Find a given token, split the stream there and read some value from the
|
||||||
|
/// separator. See also [Stream::find]
|
||||||
pub fn find_map<T>(
|
pub fn find_map<T>(
|
||||||
self,
|
self,
|
||||||
expected: &'static str,
|
expected: &'static str,
|
||||||
@@ -65,6 +74,8 @@ impl<'a> Stream<'a> {
|
|||||||
Ok((Self::new(fallback, left), output, Self::new(middle_ent, right)))
|
Ok((Self::new(fallback, left), output, Self::new(middle_ent, right)))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Split the stream at a token and return just the two sides.
|
||||||
|
/// See also [Stream::find_map].
|
||||||
pub fn find(
|
pub fn find(
|
||||||
self,
|
self,
|
||||||
expected: &'static str,
|
expected: &'static str,
|
||||||
@@ -75,6 +86,7 @@ impl<'a> Stream<'a> {
|
|||||||
Ok((left, right))
|
Ok((left, right))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Remove the last item from the stream
|
||||||
pub fn pop_back(self) -> ProjectResult<(&'a Entry, Self)> {
|
pub fn pop_back(self) -> ProjectResult<(&'a Entry, Self)> {
|
||||||
let Self { data, fallback } = self;
|
let Self { data, fallback } = self;
|
||||||
let (last, data) = (data.split_last())
|
let (last, data) = (data.split_last())
|
||||||
@@ -91,6 +103,7 @@ impl<'a> Stream<'a> {
|
|||||||
Self { data, fallback }
|
Self { data, fallback }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Assert that the stream is empty.
|
||||||
pub fn expect_empty(self) -> ProjectResult<()> {
|
pub fn expect_empty(self) -> ProjectResult<()> {
|
||||||
if let Some(x) = self.data.first() {
|
if let Some(x) = self.data.first() {
|
||||||
Err(ExpectedEOL { location: x.location() }.rc())
|
Err(ExpectedEOL { location: x.location() }.rc())
|
||||||
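Aside: `Stream` is a copyable cursor over a token slice carrying a fallback entry for error reporting. A minimal self-contained analogue of the trim/pop/pop_back pattern; the types below are toys, not the real API, and `Option` stands in for the project's error type.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Tok { BR, Comment, Name(&'static str) }

impl Tok {
  fn is_filler(&self) -> bool { matches!(self, Tok::BR | Tok::Comment) }
}

// A copyable view into a token slice, in the spirit of parse::Stream.
#[derive(Clone, Copy)]
struct Stream<'a> { data: &'a [Tok] }

impl<'a> Stream<'a> {
  // Drop comments and line breaks from both ends.
  fn trim(self) -> Self {
    let front = self.data.iter().take_while(|t| t.is_filler()).count();
    let data = &self.data[front..];
    let back = data.iter().rev().take_while(|t| t.is_filler()).count();
    Stream { data: &data[..data.len() - back] }
  }
  // Split off the first token; None stands in for the real UnexpectedEOL error.
  fn pop(self) -> Option<(&'a Tok, Self)> {
    let (head, rest) = self.data.split_first()?;
    Some((head, Stream { data: rest }))
  }
  // Split off the last token.
  fn pop_back(self) -> Option<(&'a Tok, Self)> {
    let (last, rest) = self.data.split_last()?;
    Some((last, Stream { data: rest }))
  }
}

fn main() {
  let toks = [Tok::BR, Tok::Name("a"), Tok::Name("b"), Tok::Comment];
  let s = Stream { data: &toks }.trim();
  let (first, rest) = s.pop().unwrap();
  let (last, _) = rest.pop_back().unwrap();
  assert_eq!((first, last), (&Tok::Name("a"), &Tok::Name("b")));
  println!("ok");
}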
|
|||||||
@@ -1,22 +1,32 @@
|
|||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
use super::context::Context;
|
use super::context::Context;
|
||||||
|
#[allow(unused)] // for doc
|
||||||
|
use super::context::LexerPlugin;
|
||||||
use super::errors::{BadCodePoint, BadEscapeSequence, NoStringEnd, NotHex};
|
use super::errors::{BadCodePoint, BadEscapeSequence, NoStringEnd, NotHex};
|
||||||
use crate::error::{ProjectError, ProjectResult};
|
use crate::error::{ProjectError, ProjectResult};
|
||||||
use crate::foreign::Atom;
|
use crate::foreign::Atom;
|
||||||
use crate::OrcString;
|
use crate::OrcString;
|
||||||
|
|
||||||
|
/// Reasons why [parse_string] might fail. See [StringError]
|
||||||
pub enum StringErrorKind {
|
pub enum StringErrorKind {
|
||||||
|
/// A unicode escape sequence wasn't followed by 4 hex digits
|
||||||
NotHex,
|
NotHex,
|
||||||
|
/// A unicode escape sequence contained an unassigned code point
|
||||||
BadCodePoint,
|
BadCodePoint,
|
||||||
|
/// An unrecognized escape sequence was found
|
||||||
BadEscSeq,
|
BadEscSeq,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Error produced by [parse_string]
|
||||||
pub struct StringError {
|
pub struct StringError {
|
||||||
|
/// Character where the error occurred
|
||||||
pos: usize,
|
pos: usize,
|
||||||
|
/// Reason for the error
|
||||||
kind: StringErrorKind,
|
kind: StringErrorKind,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Process escape sequences in a string literal
|
||||||
pub fn parse_string(str: &str) -> Result<String, StringError> {
|
pub fn parse_string(str: &str) -> Result<String, StringError> {
|
||||||
let mut target = String::new();
|
let mut target = String::new();
|
||||||
let mut iter = str.char_indices();
|
let mut iter = str.char_indices();
|
||||||
@@ -65,6 +75,7 @@ pub fn parse_string(str: &str) -> Result<String, StringError> {
|
|||||||
Ok(target)
|
Ok(target)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// [LexerPlugin] for a string literal.
|
||||||
pub fn lex_string<'a>(
|
pub fn lex_string<'a>(
|
||||||
data: &'a str,
|
data: &'a str,
|
||||||
ctx: &dyn Context,
|
ctx: &dyn Context,
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ use crate::ProjectTree;
|
|||||||
/// import pointing to a module in the environment.
|
/// import pointing to a module in the environment.
|
||||||
pub fn parse_layer<'a>(
|
pub fn parse_layer<'a>(
|
||||||
targets: impl Iterator<Item = &'a [Tok<String>]>,
|
targets: impl Iterator<Item = &'a [Tok<String>]>,
|
||||||
loader: &impl Fn(&[Tok<String>]) -> IOResult,
|
loader: &impl Fn(&[Tok<String>], &[Tok<String>]) -> IOResult,
|
||||||
environment: &'a ProjectTree<VName>,
|
environment: &'a ProjectTree<VName>,
|
||||||
prelude: &[FileEntry],
|
prelude: &[FileEntry],
|
||||||
lexer_plugins: &[&dyn LexerPlugin],
|
lexer_plugins: &[&dyn LexerPlugin],
|
||||||
|
|||||||
@@ -29,10 +29,11 @@ pub struct Context<'a> {
|
|||||||
/// Load the source at the given path or all within if it's a collection,
|
/// Load the source at the given path or all within if it's a collection,
|
||||||
/// and all sources imported from these.
|
/// and all sources imported from these.
|
||||||
fn load_abs_path_rec(
|
fn load_abs_path_rec(
|
||||||
|
referrer: &[Tok<String>],
|
||||||
abs_path: &[Tok<String>],
|
abs_path: &[Tok<String>],
|
||||||
mut all: Preparsed,
|
mut all: Preparsed,
|
||||||
source: &mut LoadedSourceTable,
|
source: &mut LoadedSourceTable,
|
||||||
get_source: &impl Fn(&[Tok<String>]) -> IOResult,
|
get_source: &impl Fn(&[Tok<String>], &[Tok<String>]) -> IOResult,
|
||||||
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
||||||
ctx @ Context { i, lexer_plugins, line_parsers, prelude }: Context,
|
ctx @ Context { i, lexer_plugins, line_parsers, prelude }: Context,
|
||||||
) -> ProjectResult<Preparsed> {
|
) -> ProjectResult<Preparsed> {
|
||||||
@@ -46,7 +47,7 @@ fn load_abs_path_rec(
|
|||||||
|
|
||||||
// try splitting the path to file, swallowing any IO errors
|
// try splitting the path to file, swallowing any IO errors
|
||||||
let name_split = split_max_prefix(abs_path, &|p| {
|
let name_split = split_max_prefix(abs_path, &|p| {
|
||||||
get_source(p).map(|l| l.is_code()).unwrap_or(false)
|
get_source(p, referrer).map(|l| l.is_code()).unwrap_or(false)
|
||||||
});
|
});
|
||||||
if let Some((filename, _)) = name_split {
|
if let Some((filename, _)) = name_split {
|
||||||
// Termination: exit if entry already visited
|
// Termination: exit if entry already visited
|
||||||
@@ -54,10 +55,10 @@ fn load_abs_path_rec(
|
|||||||
return Ok(all);
|
return Ok(all);
|
||||||
}
|
}
|
||||||
// if the filename is valid, load, preparse and record this file
|
// if the filename is valid, load, preparse and record this file
|
||||||
let text = unwrap_or!(get_source(filename)? => Loaded::Code; {
|
let text = unwrap_or!(get_source(filename, referrer)? => Loaded::Code; {
|
||||||
return Err(UnexpectedDirectory { path: filename.to_vec() }.rc())
|
return Err(UnexpectedDirectory { path: filename.to_vec() }.rc())
|
||||||
});
|
});
|
||||||
let entries = parse::parse2(ParsingContext::new(
|
let entries = parse::parse_file(ParsingContext::new(
|
||||||
i,
|
i,
|
||||||
Arc::new(filename.to_vec()),
|
Arc::new(filename.to_vec()),
|
||||||
text,
|
text,
|
||||||
@@ -73,6 +74,7 @@ fn load_abs_path_rec(
|
|||||||
mut all|
|
mut all|
|
||||||
-> ProjectResult<_> {
|
-> ProjectResult<_> {
|
||||||
let details = unwrap_or!(module.extra.details(); return Ok(all));
|
let details = unwrap_or!(module.extra.details(); return Ok(all));
|
||||||
|
let referrer = modpath.iter().rev_vec_clone();
|
||||||
for import in &details.imports {
|
for import in &details.imports {
|
||||||
let origin = &Location::Unknown;
|
let origin = &Location::Unknown;
|
||||||
let abs_pathv = import_abs_path(
|
let abs_pathv = import_abs_path(
|
||||||
@@ -87,6 +89,7 @@ fn load_abs_path_rec(
|
|||||||
}
|
}
|
||||||
// recurse on imported module
|
// recurse on imported module
|
||||||
all = load_abs_path_rec(
|
all = load_abs_path_rec(
|
||||||
|
&referrer,
|
||||||
&abs_pathv,
|
&abs_pathv,
|
||||||
all,
|
all,
|
||||||
source,
|
source,
|
||||||
@@ -101,7 +104,7 @@ fn load_abs_path_rec(
|
|||||||
all.0.overlay(preparsed.0).map(Preparsed)
|
all.0.overlay(preparsed.0).map(Preparsed)
|
||||||
} else {
|
} else {
|
||||||
// If the path is not within a file, load it as directory
|
// If the path is not within a file, load it as directory
|
||||||
let coll = match get_source(abs_path) {
|
let coll = match get_source(abs_path, referrer) {
|
||||||
Ok(Loaded::Collection(coll)) => coll,
|
Ok(Loaded::Collection(coll)) => coll,
|
||||||
Ok(Loaded::Code(_)) => {
|
Ok(Loaded::Code(_)) => {
|
||||||
unreachable!("split_name returned None but the path is a file")
|
unreachable!("split_name returned None but the path is a file")
|
||||||
@@ -118,6 +121,7 @@ fn load_abs_path_rec(
|
|||||||
for item in coll.iter() {
|
for item in coll.iter() {
|
||||||
let abs_subpath = pushed_ref(abs_path, i.i(item));
|
let abs_subpath = pushed_ref(abs_path, i.i(item));
|
||||||
all = load_abs_path_rec(
|
all = load_abs_path_rec(
|
||||||
|
referrer,
|
||||||
&abs_subpath,
|
&abs_subpath,
|
||||||
all,
|
all,
|
||||||
source,
|
source,
|
||||||
@@ -139,7 +143,7 @@ fn load_abs_path_rec(
|
|||||||
pub fn load_source<'a>(
|
pub fn load_source<'a>(
|
||||||
targets: impl Iterator<Item = &'a [Tok<String>]>,
|
targets: impl Iterator<Item = &'a [Tok<String>]>,
|
||||||
ctx: Context,
|
ctx: Context,
|
||||||
get_source: &impl Fn(&[Tok<String>]) -> IOResult,
|
get_source: &impl Fn(&[Tok<String>], &[Tok<String>]) -> IOResult,
|
||||||
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
||||||
) -> ProjectResult<(Preparsed, LoadedSourceTable)> {
|
) -> ProjectResult<(Preparsed, LoadedSourceTable)> {
|
||||||
let mut table = LoadedSourceTable::new();
|
let mut table = LoadedSourceTable::new();
|
||||||
@@ -149,6 +153,7 @@ pub fn load_source<'a>(
|
|||||||
for target in targets {
|
for target in targets {
|
||||||
any_target |= true;
|
any_target |= true;
|
||||||
all = load_abs_path_rec(
|
all = load_abs_path_rec(
|
||||||
|
&[],
|
||||||
target,
|
target,
|
||||||
all,
|
all,
|
||||||
&mut table,
|
&mut table,
|
||||||
|
|||||||
@@ -104,6 +104,8 @@ pub enum PHClass {
|
|||||||
},
|
},
|
||||||
/// Matches exactly one token, lambda or parenthesized group
|
/// Matches exactly one token, lambda or parenthesized group
|
||||||
Scalar,
|
Scalar,
|
||||||
|
/// Matches exactly one name
|
||||||
|
Name,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Properties of a placeholder that matches unknown tokens in macros
|
/// Properties of a placeholder that matches unknown tokens in macros
|
||||||
@@ -120,6 +122,7 @@ impl Display for Placeholder {
|
|||||||
let name = &self.name;
|
let name = &self.name;
|
||||||
match self.class {
|
match self.class {
|
||||||
PHClass::Scalar => write!(f, "${name}"),
|
PHClass::Scalar => write!(f, "${name}"),
|
||||||
|
PHClass::Name => write!(f, "$_{name}"),
|
||||||
PHClass::Vec { nonzero, prio } => {
|
PHClass::Vec { nonzero, prio } => {
|
||||||
if nonzero { write!(f, "...") } else { write!(f, "..") }?;
|
if nonzero { write!(f, "...") } else { write!(f, "..") }?;
|
||||||
write!(f, "${name}:{prio}")
|
write!(f, "${name}:{prio}")
|
||||||
@@ -128,6 +131,36 @@ impl Display for Placeholder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Different types of brackets supported by Orchid
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum PType {
  /// ()
  Par,
  /// []
  Sqr,
  /// {}
  Curl,
}
impl PType {
  /// Left paren character for this paren type
  pub fn l(self) -> char {
    match self {
      PType::Curl => '{',
      PType::Par => '(',
      PType::Sqr => '[',
    }
  }

  /// Right paren character for this paren type
  pub fn r(self) -> char {
    match self {
      PType::Curl => '}',
      PType::Par => ')',
      PType::Sqr => ']',
    }
  }
}
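Aside: one immediate benefit of an enum over raw chars is that paired brackets can be matched by variant rather than through a char table. A small standalone balance checker over such an enum; the checker itself is not part of the commit.

#[derive(Debug, Clone, Copy, PartialEq)]
enum PType { Par, Sqr, Curl }

#[derive(Debug, Clone, Copy)]
enum Tok { LP(PType), RP(PType), Other }

// Verify that every closer matches the most recent unmatched opener.
fn balanced(toks: &[Tok]) -> bool {
  let mut stack = Vec::new();
  for tok in toks {
    match tok {
      Tok::LP(t) => stack.push(*t),
      Tok::RP(t) => {
        if stack.pop() != Some(*t) { return false }
      },
      Tok::Other => (),
    }
  }
  stack.is_empty()
}

fn main() {
  let ok = [
    Tok::LP(PType::Par), Tok::LP(PType::Sqr), Tok::Other,
    Tok::RP(PType::Sqr), Tok::RP(PType::Par),
  ];
  assert!(balanced(&ok));
  assert!(!balanced(&[Tok::LP(PType::Par), Tok::RP(PType::Curl)]));
  println!("ok");
}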
|
|
||||||
/// An S-expression as read from a source file
|
/// An S-expression as read from a source file
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum Clause<N: NameLike> {
|
pub enum Clause<N: NameLike> {
|
||||||
@@ -139,7 +172,7 @@ pub enum Clause<N: NameLike> {
|
|||||||
Name(N),
|
Name(N),
|
||||||
/// A parenthesized expression
|
/// A parenthesized expression
|
||||||
/// eg. `(print out "hello")`, `[1, 2, 3]`, `{Some(t) => t}`
|
/// eg. `(print out "hello")`, `[1, 2, 3]`, `{Some(t) => t}`
|
||||||
S(char, Rc<Vec<Expr<N>>>),
|
S(PType, Rc<Vec<Expr<N>>>),
|
||||||
/// A function expression, eg. `\x. x + 1`
|
/// A function expression, eg. `\x. x + 1`
|
||||||
Lambda(Rc<Vec<Expr<N>>>, Rc<Vec<Expr<N>>>),
|
Lambda(Rc<Vec<Expr<N>>>, Rc<Vec<Expr<N>>>),
|
||||||
/// A placeholder for macros, eg. `$name`, `...$body`, `...$lhs:1`
|
/// A placeholder for macros, eg. `$name`, `...$body`, `...$lhs:1`
|
||||||
@@ -159,7 +192,7 @@ impl<N: NameLike> Clause<N> {
|
|||||||
/// Convert with identical meaning
|
/// Convert with identical meaning
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn into_expr(self) -> Expr<N> {
|
pub fn into_expr(self) -> Expr<N> {
|
||||||
if let Self::S('(', body) = &self {
|
if let Self::S(PType::Par, body) = &self {
|
||||||
if body.len() == 1 {
|
if body.len() == 1 {
|
||||||
body[0].clone()
|
body[0].clone()
|
||||||
} else {
|
} else {
|
||||||
@@ -178,7 +211,7 @@ impl<N: NameLike> Clause<N> {
|
|||||||
} else if exprs.len() == 1 {
|
} else if exprs.len() == 1 {
|
||||||
Some(exprs[0].value.clone())
|
Some(exprs[0].value.clone())
|
||||||
} else {
|
} else {
|
||||||
Some(Self::S('(', Rc::new(exprs.to_vec())))
|
Some(Self::S(PType::Par, Rc::new(exprs.to_vec())))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -188,7 +221,7 @@ impl<N: NameLike> Clause<N> {
|
|||||||
if exprv.len() < 2 {
|
if exprv.len() < 2 {
|
||||||
Self::from_exprs(exprv)
|
Self::from_exprs(exprv)
|
||||||
} else {
|
} else {
|
||||||
Some(Self::S('(', exprv.clone()))
|
Some(Self::S(PType::Par, exprv.clone()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -304,6 +337,19 @@ impl<N: NameLike> Clause<N> {
|
|||||||
Clause::S(_, body) => search_all_slcs(body, f),
|
Clause::S(_, body) => search_all_slcs(body, f),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
  /// Generate a parenthesized expression sequence
  pub fn s(delimiter: char, items: impl IntoIterator<Item = Self>) -> Self {
    Self::S(
      match delimiter {
        '(' => PType::Par,
        '[' => PType::Sqr,
        '{' => PType::Curl,
        _ => panic!("not an opening paren"),
      },
      Rc::new(items.into_iter().map(Self::into_expr).collect()),
    )
  }
}
||||||
|
|
||||||
impl Clause<VName> {
|
impl Clause<VName> {
|
||||||
@@ -333,15 +379,9 @@ impl<N: NameLike> Display for Clause<N> {
|
|||||||
Self::ExternFn(fun) => write!(f, "{fun:?}"),
|
Self::ExternFn(fun) => write!(f, "{fun:?}"),
|
||||||
Self::Atom(a) => write!(f, "{a:?}"),
|
Self::Atom(a) => write!(f, "{a:?}"),
|
||||||
Self::Name(name) => write!(f, "{}", name.to_strv().join("::")),
|
Self::Name(name) => write!(f, "{}", name.to_strv().join("::")),
|
||||||
Self::S(del, items) => {
|
Self::S(t, items) => {
|
||||||
let body = items.iter().join(" ");
|
let body = items.iter().join(" ");
|
||||||
let led = match del {
|
write!(f, "{}{body}{}", t.l(), t.r())
|
||||||
'(' => ")",
|
|
||||||
'[' => "]",
|
|
||||||
'{' => "}",
|
|
||||||
_ => "CLOSING_DELIM",
|
|
||||||
};
|
|
||||||
write!(f, "{del}{body}{led}")
|
|
||||||
},
|
},
|
||||||
Self::Lambda(arg, body) => {
|
Self::Lambda(arg, body) => {
|
||||||
let args = arg.iter().join(" ");
|
let args = arg.iter().join(" ");
|
||||||
|
|||||||
@@ -8,7 +8,8 @@ pub type AstError = ast_to_postmacro::Error;
|
|||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
pub fn ast_to_interpreted(
|
pub fn ast_to_interpreted(
|
||||||
ast: &ast::Expr<Sym>,
|
ast: &ast::Expr<Sym>,
|
||||||
|
symbol: Sym,
|
||||||
) -> Result<interpreted::ExprInst, AstError> {
|
) -> Result<interpreted::ExprInst, AstError> {
|
||||||
let pmtree = ast_to_postmacro::expr(ast)?;
|
let pmtree = ast_to_postmacro::expr(ast, symbol)?;
|
||||||
Ok(postmacro_to_interpreted::expr(&pmtree))
|
Ok(postmacro_to_interpreted::expr(&pmtree))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,8 +2,10 @@ use std::rc::Rc;
|
|||||||
|
|
||||||
use super::location::Location;
|
use super::location::Location;
|
||||||
use super::{ast, postmacro};
|
use super::{ast, postmacro};
|
||||||
|
use crate::ast::PType;
|
||||||
use crate::error::ProjectError;
|
use crate::error::ProjectError;
|
||||||
use crate::utils::substack::Substack;
|
use crate::utils::substack::Substack;
|
||||||
|
use crate::utils::unwrap_or;
|
||||||
use crate::Sym;
|
use crate::Sym;
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@@ -12,7 +14,7 @@ pub enum ErrorKind {
|
|||||||
EmptyS,
|
EmptyS,
|
||||||
/// Only `(...)` may be converted to typed lambdas. `[...]` and `{...}`
|
/// Only `(...)` may be converted to typed lambdas. `[...]` and `{...}`
|
||||||
/// left in the code are signs of incomplete macro execution
|
/// left in the code are signs of incomplete macro execution
|
||||||
BadGroup(char),
|
BadGroup(PType),
|
||||||
/// Placeholders shouldn't even occur in the code during macro
|
/// Placeholders shouldn't even occur in the code during macro
|
||||||
/// execution. Something is clearly terribly wrong
|
/// execution. Something is clearly terribly wrong
|
||||||
Placeholder,
|
Placeholder,
|
||||||
@@ -24,11 +26,12 @@ pub enum ErrorKind {
|
|||||||
pub struct Error {
|
pub struct Error {
|
||||||
pub location: Location,
|
pub location: Location,
|
||||||
pub kind: ErrorKind,
|
pub kind: ErrorKind,
|
||||||
|
pub symbol: Sym,
|
||||||
}
|
}
|
||||||
impl Error {
|
impl Error {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn new(kind: ErrorKind, location: &Location) -> Self {
|
pub fn new(kind: ErrorKind, location: &Location, symbol: Sym) -> Self {
|
||||||
Self { location: location.clone(), kind }
|
Self { location: location.clone(), kind, symbol }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl ProjectError for Error {
|
impl ProjectError for Error {
|
||||||
@@ -46,22 +49,31 @@ impl ProjectError for Error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
match self.kind {
|
if let ErrorKind::BadGroup(t) = self.kind {
|
||||||
ErrorKind::BadGroup(char) => format!("{} block found in the code", char),
|
let sym = self.symbol.extern_vec().join("::");
|
||||||
_ => self.description().to_string(),
|
return format!("{}{} block found in {sym}", t.l(), t.r());
|
||||||
}
|
}
|
||||||
|
format!(
|
||||||
|
"in {}, {}",
|
||||||
|
self.symbol.extern_vec().join("::"),
|
||||||
|
self.description()
|
||||||
|
)
|
||||||
}
|
}
|
||||||
fn one_position(&self) -> Location { self.location.clone() }
|
fn one_position(&self) -> Location { self.location.clone() }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Try to convert an expression from AST format to typed lambda
|
/// Try to convert an expression from AST format to typed lambda
|
||||||
pub fn expr(expr: &ast::Expr<Sym>) -> Result<postmacro::Expr, Error> {
|
pub fn expr(
|
||||||
expr_rec(expr, Context::new())
|
expr: &ast::Expr<Sym>,
|
||||||
|
symbol: Sym,
|
||||||
|
) -> Result<postmacro::Expr, Error> {
|
||||||
|
expr_rec(expr, Context::new(symbol))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
struct Context<'a> {
|
struct Context<'a> {
|
||||||
names: Substack<'a, Sym>,
|
names: Substack<'a, Sym>,
|
||||||
|
symbol: Sym,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Context<'a> {
|
impl<'a> Context<'a> {
|
||||||
@@ -70,11 +82,12 @@ impl<'a> Context<'a> {
|
|||||||
where
|
where
|
||||||
'a: 'b,
|
'a: 'b,
|
||||||
{
|
{
|
||||||
Context { names: self.names.push(name) }
|
Context { names: self.names.push(name), symbol: self.symbol.clone() }
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
impl Context<'static> {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
fn new() -> Context<'static> { Context { names: Substack::Bottom } }
|
fn new(symbol: Sym) -> Self { Self { names: Substack::Bottom, symbol } }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Process an expression sequence
|
/// Process an expression sequence
|
||||||
@@ -83,8 +96,9 @@ fn exprv_rec<'a>(
|
|||||||
v: &'a [ast::Expr<Sym>],
|
v: &'a [ast::Expr<Sym>],
|
||||||
ctx: Context<'a>,
|
ctx: Context<'a>,
|
||||||
) -> Result<postmacro::Expr, Error> {
|
) -> Result<postmacro::Expr, Error> {
|
||||||
let (last, rest) =
|
let (last, rest) = unwrap_or! {v.split_last(); {
|
||||||
(v.split_last()).ok_or_else(|| Error::new(ErrorKind::EmptyS, location))?;
|
return Err(Error::new(ErrorKind::EmptyS, location, ctx.symbol));
|
||||||
|
}};
|
||||||
if rest.is_empty() {
|
if rest.is_empty() {
|
||||||
return expr_rec(&v[0], ctx);
|
return expr_rec(&v[0], ctx);
|
||||||
}
|
}
|
||||||
@@ -99,13 +113,16 @@ fn expr_rec<'a>(
|
|||||||
ast::Expr { value, location }: &'a ast::Expr<Sym>,
|
ast::Expr { value, location }: &'a ast::Expr<Sym>,
|
||||||
ctx: Context<'a>,
|
ctx: Context<'a>,
|
||||||
) -> Result<postmacro::Expr, Error> {
|
) -> Result<postmacro::Expr, Error> {
|
||||||
if let ast::Clause::S(paren, body) = value {
|
match value {
|
||||||
if *paren != '(' {
|
ast::Clause::S(PType::Par, body) =>
|
||||||
return Err(Error::new(ErrorKind::BadGroup(*paren), location));
|
return Ok(postmacro::Expr {
|
||||||
|
value: exprv_rec(location, body.as_ref(), ctx)?.value,
|
||||||
|
location: location.clone(),
|
||||||
|
}),
|
||||||
|
ast::Clause::S(paren, _) =>
|
||||||
|
return Err(Error::new(ErrorKind::BadGroup(*paren), location, ctx.symbol)),
|
||||||
|
_ => (),
|
||||||
}
|
}
|
||||||
let expr = exprv_rec(location, body.as_ref(), ctx)?;
|
|
||||||
Ok(postmacro::Expr { value: expr.value, location: location.clone() })
|
|
||||||
} else {
|
|
||||||
let value = match value {
|
let value = match value {
|
||||||
ast::Clause::Atom(a) => postmacro::Clause::Atom(a.clone()),
|
ast::Clause::Atom(a) => postmacro::Clause::Atom(a.clone()),
|
||||||
ast::Clause::ExternFn(fun) => postmacro::Clause::ExternFn(fun.clone()),
|
ast::Clause::ExternFn(fun) => postmacro::Clause::ExternFn(fun.clone()),
|
||||||
@@ -113,8 +130,9 @@ fn expr_rec<'a>(
|
|||||||
let name = match &arg[..] {
|
let name = match &arg[..] {
|
||||||
[ast::Expr { value: ast::Clause::Name(name), .. }] => name,
|
[ast::Expr { value: ast::Clause::Name(name), .. }] => name,
|
||||||
[ast::Expr { value: ast::Clause::Placeh { .. }, .. }] =>
|
[ast::Expr { value: ast::Clause::Placeh { .. }, .. }] =>
|
||||||
return Err(Error::new(ErrorKind::Placeholder, location)),
|
return Err(Error::new(ErrorKind::Placeholder, location, ctx.symbol)),
|
||||||
_ => return Err(Error::new(ErrorKind::InvalidArg, location)),
|
_ =>
|
||||||
|
return Err(Error::new(ErrorKind::InvalidArg, location, ctx.symbol)),
|
||||||
};
|
};
|
||||||
let body_ctx = ctx.w_name(name.clone());
|
let body_ctx = ctx.w_name(name.clone());
|
||||||
let body = exprv_rec(location, b.as_ref(), body_ctx)?;
|
let body = exprv_rec(location, b.as_ref(), body_ctx)?;
|
||||||
@@ -130,16 +148,12 @@ fn expr_rec<'a>(
|
|||||||
None => postmacro::Clause::Constant(name.clone()),
|
None => postmacro::Clause::Constant(name.clone()),
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
ast::Clause::S(paren, entries) => {
|
ast::Clause::S(PType::Par, entries) =>
|
||||||
if *paren != '(' {
|
exprv_rec(location, entries.as_ref(), ctx)?.value,
|
||||||
return Err(Error::new(ErrorKind::BadGroup(*paren), location));
|
ast::Clause::S(paren, _) =>
|
||||||
}
|
return Err(Error::new(ErrorKind::BadGroup(*paren), location, ctx.symbol)),
|
||||||
let expr = exprv_rec(location, entries.as_ref(), ctx)?;
|
|
||||||
expr.value
|
|
||||||
},
|
|
||||||
ast::Clause::Placeh { .. } =>
|
ast::Clause::Placeh { .. } =>
|
||||||
return Err(Error::new(ErrorKind::Placeholder, location)),
|
return Err(Error::new(ErrorKind::Placeholder, location, ctx.symbol)),
|
||||||
};
|
};
|
||||||
Ok(postmacro::Expr { value, location: location.clone() })
|
Ok(postmacro::Expr { value, location: location.clone() })
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,6 @@
|
|||||||
//! functions have to define
|
//! functions have to define
|
||||||
use std::fmt::{Debug, Display};
|
use std::fmt::{Debug, Display};
|
||||||
use std::ops::{Deref, DerefMut};
|
use std::ops::{Deref, DerefMut};
|
||||||
use std::rc::Rc;
|
|
||||||
use std::sync::{Arc, Mutex, TryLockError};
|
use std::sync::{Arc, Mutex, TryLockError};
|
||||||
|
|
||||||
#[allow(unused)] // for doc
|
#[allow(unused)] // for doc
|
||||||
@@ -13,7 +12,7 @@ use super::location::Location;
|
|||||||
use super::path_set::PathSet;
|
use super::path_set::PathSet;
|
||||||
#[allow(unused)] // for doc
|
#[allow(unused)] // for doc
|
||||||
use crate::foreign::Atomic;
|
use crate::foreign::Atomic;
|
||||||
use crate::foreign::{Atom, ExFn, ExternError};
|
use crate::foreign::{Atom, ExFn, XfnResult};
|
||||||
use crate::utils::ddispatch::request;
|
use crate::utils::ddispatch::request;
|
||||||
use crate::utils::take_with_output;
|
use crate::utils::take_with_output;
|
||||||
use crate::Sym;
|
use crate::Sym;
|
||||||
@@ -53,11 +52,11 @@ pub struct NotALiteral;
|
|||||||
/// Types automatically convertible from an [ExprInst]
|
/// Types automatically convertible from an [ExprInst]
|
||||||
pub trait TryFromExprInst: Sized {
|
pub trait TryFromExprInst: Sized {
|
||||||
/// Match and clone the value out of an [ExprInst]
|
/// Match and clone the value out of an [ExprInst]
|
||||||
fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>>;
|
fn from_exi(exi: ExprInst) -> XfnResult<Self>;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TryFromExprInst for ExprInst {
|
impl TryFromExprInst for ExprInst {
|
||||||
fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> { Ok(exi) }
|
fn from_exi(exi: ExprInst) -> XfnResult<Self> { Ok(exi) }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A wrapper around expressions to handle their multiple occurrences in
|
/// A wrapper around expressions to handle their multiple occurrences in
|
||||||
@@ -162,7 +161,7 @@ impl ExprInst {
|
|||||||
/// Convert into any type that implements [FromExprInst]. Calls to this
|
/// Convert into any type that implements [FromExprInst]. Calls to this
|
||||||
/// function are generated wherever a conversion is elided in an extern
|
/// function are generated wherever a conversion is elided in an extern
|
||||||
/// function.
|
/// function.
|
||||||
pub fn downcast<T: TryFromExprInst>(self) -> Result<T, Rc<dyn ExternError>> {
|
pub fn downcast<T: TryFromExprInst>(self) -> XfnResult<T> {
|
||||||
T::from_exi(self)
|
T::from_exi(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
//! Building blocks of a source file
|
//! Building blocks of a source file
|
||||||
use std::fmt::Display;
|
use std::fmt::Display;
|
||||||
use std::iter;
|
|
||||||
|
|
||||||
use itertools::{Either, Itertools};
|
use itertools::{Either, Itertools};
|
||||||
|
|
||||||
@@ -9,7 +8,7 @@ use crate::ast::{Constant, Rule};
|
|||||||
use crate::error::{ProjectError, ProjectResult, TooManySupers};
|
use crate::error::{ProjectError, ProjectResult, TooManySupers};
|
||||||
use crate::interner::{Interner, Tok};
|
use crate::interner::{Interner, Tok};
|
||||||
use crate::utils::pure_seq::pushed;
|
use crate::utils::pure_seq::pushed;
|
||||||
use crate::utils::{unwrap_or, BoxedIter};
|
use crate::utils::BoxedIter;
|
||||||
use crate::Location;
|
use crate::Location;
|
||||||
|
|
||||||
/// An import pointing at another module, either specifying the symbol to be
|
/// An import pointing at another module, either specifying the symbol to be
|
||||||
@@ -79,6 +78,12 @@ pub enum MemberKind {
|
|||||||
/// A prefixed set of other entries
|
/// A prefixed set of other entries
|
||||||
Module(ModuleBlock),
|
Module(ModuleBlock),
|
||||||
}
|
}
|
||||||
|
impl MemberKind {
|
||||||
|
/// Convert to [FileEntry]
|
||||||
|
pub fn to_entry(self, exported: bool, location: Location) -> FileEntry {
|
||||||
|
FileEntryKind::Member(Member { exported, kind: self }).wrap(location)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Display for MemberKind {
|
impl Display for MemberKind {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
@@ -122,6 +127,12 @@ pub enum FileEntryKind {
|
|||||||
/// tokens that the local module doesn't actually define a role for
|
/// tokens that the local module doesn't actually define a role for
|
||||||
Export(Vec<(Tok<String>, Location)>),
|
Export(Vec<(Tok<String>, Location)>),
|
||||||
}
|
}
|
||||||
|
impl FileEntryKind {
|
||||||
|
/// Wrap with no location
|
||||||
|
pub fn wrap(self, location: Location) -> FileEntry {
|
||||||
|
FileEntry { kind: self, locations: vec![location] }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Display for FileEntryKind {
|
impl Display for FileEntryKind {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
@@ -239,25 +250,22 @@ pub fn absolute_path(
|
|||||||
|
|
||||||
#[must_use = "this could be None which means that there are too many supers"]
|
#[must_use = "this could be None which means that there are too many supers"]
|
||||||
fn absolute_path_rec(
|
fn absolute_path_rec(
|
||||||
abs_location: &[Tok<String>],
|
mut abs_location: &[Tok<String>],
|
||||||
rel_path: &[Tok<String>],
|
mut rel_path: &[Tok<String>],
|
||||||
i: &Interner,
|
i: &Interner,
|
||||||
) -> Option<VName> {
|
) -> Option<VName> {
|
||||||
let (head, tail) = unwrap_or!(rel_path.split_first();
|
let mut relative = false;
|
||||||
return Some(vec![])
|
while rel_path.first() == Some(&i.i("super")) {
|
||||||
);
|
abs_location = abs_location.split_last()?.1;
|
||||||
if *head == i.i("super") {
|
rel_path = rel_path.split_first().expect("checked above").1;
|
||||||
let (_, new_abs) = abs_location.split_last()?;
|
relative = true;
|
||||||
if tail.is_empty() {
|
|
||||||
Some(new_abs.to_vec())
|
|
||||||
} else {
|
|
||||||
let new_rel =
|
|
||||||
iter::once(i.i("self")).chain(tail.iter().cloned()).collect::<Vec<_>>();
|
|
||||||
absolute_path_rec(new_abs, &new_rel, i)
|
|
||||||
}
|
}
|
||||||
} else if *head == i.i("self") {
|
if rel_path.first() == Some(&i.i("self")) {
|
||||||
Some(abs_location.iter().chain(tail.iter()).cloned().collect())
|
relative = true;
|
||||||
} else {
|
rel_path = rel_path.split_first().expect("checked above").1;
|
||||||
Some(rel_path.to_vec())
|
}
|
||||||
|
match relative {
|
||||||
|
true => Some(abs_location.iter().chain(rel_path).cloned().collect()),
|
||||||
|
false => Some(rel_path.to_vec()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
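The rewritten `absolute_path_rec` above replaces the recursive `super`/`self` handling with an iterative prefix strip: each leading `super` drops one segment of the current module path, a leading `self` only marks the path as relative, and a path with neither prefix is taken as already absolute. A minimal standalone sketch of that logic, using plain `&str` segments in place of the interner's `Tok<String>` values (the names below are illustrative only):

```rust
fn absolute_path_sketch(mut abs: &[&str], mut rel: &[&str]) -> Option<Vec<String>> {
    let mut relative = false;
    // each leading `super` discards one segment of the absolute location
    while rel.first() == Some(&"super") {
        abs = abs.split_last()?.1;
        rel = &rel[1..];
        relative = true;
    }
    // a leading `self` just marks the path as relative
    if rel.first() == Some(&"self") {
        rel = &rel[1..];
        relative = true;
    }
    Some(if relative {
        abs.iter().chain(rel).map(|s| s.to_string()).collect()
    } else {
        rel.iter().map(|s| s.to_string()).collect()
    })
}

fn main() {
    // from inside module a::b::c, `super::super::x` resolves to a::x
    assert_eq!(
        absolute_path_sketch(&["a", "b", "c"], &["super", "super", "x"]),
        Some(vec!["a".to_string(), "x".to_string()]),
    );
    // a path with no leading `super` or `self` is taken as already absolute
    assert_eq!(
        absolute_path_sketch(&["a", "b"], &["std", "list"]),
        Some(vec!["std".to_string(), "list".to_string()]),
    );
}
```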
@@ -14,5 +14,9 @@ pub trait Matcher {
|
|||||||
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self;
|
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self;
|
||||||
/// Apply matcher to a token sequence
|
/// Apply matcher to a token sequence
|
||||||
#[must_use]
|
#[must_use]
|
||||||
fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>>;
|
fn apply<'a>(
|
||||||
|
&self,
|
||||||
|
source: &'a [RuleExpr],
|
||||||
|
save_loc: &impl Fn(Sym) -> bool,
|
||||||
|
) -> Option<State<'a>>;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,24 +3,27 @@ use super::shared::AnyMatcher;
|
|||||||
use super::vec_match::vec_match;
|
use super::vec_match::vec_match;
|
||||||
use crate::rule::matcher::RuleExpr;
|
use crate::rule::matcher::RuleExpr;
|
||||||
use crate::rule::state::State;
|
use crate::rule::state::State;
|
||||||
|
use crate::Sym;
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn any_match<'a>(
|
pub fn any_match<'a>(
|
||||||
matcher: &AnyMatcher,
|
matcher: &AnyMatcher,
|
||||||
seq: &'a [RuleExpr],
|
seq: &'a [RuleExpr],
|
||||||
|
save_loc: &impl Fn(Sym) -> bool,
|
||||||
) -> Option<State<'a>> {
|
) -> Option<State<'a>> {
|
||||||
match matcher {
|
match matcher {
|
||||||
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq),
|
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq, save_loc),
|
||||||
AnyMatcher::Vec { left, mid, right } => {
|
AnyMatcher::Vec { left, mid, right } => {
|
||||||
if seq.len() < left.len() + right.len() {
|
if seq.len() < left.len() + right.len() {
|
||||||
return None;
|
return None;
|
||||||
};
|
};
|
||||||
let left_split = left.len();
|
let left_split = left.len();
|
||||||
let right_split = seq.len() - right.len();
|
let right_split = seq.len() - right.len();
|
||||||
let mut state = scalv_match(left, &seq[..left_split])?;
|
Some(
|
||||||
state.extend(scalv_match(right, &seq[right_split..])?);
|
scalv_match(left, &seq[..left_split], save_loc)?
|
||||||
state.extend(vec_match(mid, &seq[left_split..right_split])?);
|
.combine(scalv_match(right, &seq[right_split..], save_loc)?)
|
||||||
Some(state)
|
.combine(vec_match(mid, &seq[left_split..right_split], save_loc)?),
|
||||||
|
)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -110,12 +110,14 @@ fn mk_scalar(pattern: &RuleExpr) -> ScalMatcher {
|
|||||||
Clause::Atom(a) => ScalMatcher::Atom(a.clone()),
|
Clause::Atom(a) => ScalMatcher::Atom(a.clone()),
|
||||||
Clause::ExternFn(_) => panic!("Cannot match on ExternFn"),
|
Clause::ExternFn(_) => panic!("Cannot match on ExternFn"),
|
||||||
Clause::Name(n) => ScalMatcher::Name(n.clone()),
|
Clause::Name(n) => ScalMatcher::Name(n.clone()),
|
||||||
Clause::Placeh(Placeholder { name, class }) => {
|
Clause::Placeh(Placeholder { name, class }) => match class {
|
||||||
debug_assert!(
|
PHClass::Vec { .. } => {
|
||||||
!matches!(class, PHClass::Vec { .. }),
|
panic!("Scalar matcher cannot be built from vector pattern")
|
||||||
"Scalar matcher cannot be built from vector pattern"
|
},
|
||||||
);
|
PHClass::Scalar | PHClass::Name => ScalMatcher::Placeh {
|
||||||
ScalMatcher::Placeh(name.clone())
|
key: name.clone(),
|
||||||
|
name_only: class == &PHClass::Name,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
Clause::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))),
|
Clause::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))),
|
||||||
Clause::Lambda(arg, body) =>
|
Clause::Lambda(arg, body) =>
|
||||||
@@ -128,7 +130,7 @@ mod test {
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use super::mk_any;
|
use super::mk_any;
|
||||||
use crate::ast::{Clause, PHClass, Placeholder};
|
use crate::ast::{Clause, PHClass, PType, Placeholder};
|
||||||
use crate::interner::Interner;
|
use crate::interner::Interner;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -142,7 +144,7 @@ mod test {
|
|||||||
.into_expr(),
|
.into_expr(),
|
||||||
Clause::Name(i.i(&[i.i("prelude"), i.i("do")][..])).into_expr(),
|
Clause::Name(i.i(&[i.i("prelude"), i.i("do")][..])).into_expr(),
|
||||||
Clause::S(
|
Clause::S(
|
||||||
'(',
|
PType::Par,
|
||||||
Rc::new(vec![
|
Rc::new(vec![
|
||||||
Clause::Placeh(Placeholder {
|
Clause::Placeh(Placeholder {
|
||||||
class: PHClass::Vec { nonzero: false, prio: 0 },
|
class: PHClass::Vec { nonzero: false, prio: 0 },
|
||||||
|
|||||||
@@ -3,25 +3,32 @@ use super::shared::ScalMatcher;
|
|||||||
use crate::ast::Clause;
|
use crate::ast::Clause;
|
||||||
use crate::rule::matcher::RuleExpr;
|
use crate::rule::matcher::RuleExpr;
|
||||||
use crate::rule::state::{State, StateEntry};
|
use crate::rule::state::{State, StateEntry};
|
||||||
|
use crate::Sym;
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn scal_match<'a>(
|
pub fn scal_match<'a>(
|
||||||
matcher: &ScalMatcher,
|
matcher: &ScalMatcher,
|
||||||
expr: &'a RuleExpr,
|
expr: &'a RuleExpr,
|
||||||
|
save_loc: &impl Fn(Sym) -> bool,
|
||||||
) -> Option<State<'a>> {
|
) -> Option<State<'a>> {
|
||||||
match (matcher, &expr.value) {
|
match (matcher, &expr.value) {
|
||||||
(ScalMatcher::Atom(a1), Clause::Atom(a2)) if a1.0.strict_eq(&a2.0) =>
|
(ScalMatcher::Atom(a1), Clause::Atom(a2)) if a1.0.strict_eq(&a2.0) =>
|
||||||
Some(State::new()),
|
Some(State::default()),
|
||||||
(ScalMatcher::Name(n1), Clause::Name(n2)) if n1 == n2 => Some(State::new()),
|
(ScalMatcher::Name(n1), Clause::Name(n2)) if n1 == n2 =>
|
||||||
(ScalMatcher::Placeh(key), _) =>
|
Some(match save_loc(n1.clone()) {
|
||||||
Some(State::from([(key.clone(), StateEntry::Scalar(expr))])),
|
true => State::from_name(n1.clone(), expr.location.clone()),
|
||||||
|
false => State::default(),
|
||||||
|
}),
|
||||||
|
(ScalMatcher::Placeh { key, name_only: true }, Clause::Name(n)) =>
|
||||||
|
Some(State::from_ph(key.clone(), StateEntry::Name(n, &expr.location))),
|
||||||
|
(ScalMatcher::Placeh { key, name_only: false }, _) =>
|
||||||
|
Some(State::from_ph(key.clone(), StateEntry::Scalar(expr))),
|
||||||
(ScalMatcher::S(c1, b_mat), Clause::S(c2, body)) if c1 == c2 =>
|
(ScalMatcher::S(c1, b_mat), Clause::S(c2, body)) if c1 == c2 =>
|
||||||
any_match(b_mat, &body[..]),
|
any_match(b_mat, &body[..], save_loc),
|
||||||
(ScalMatcher::Lambda(arg_mat, b_mat), Clause::Lambda(arg, body)) => {
|
(ScalMatcher::Lambda(arg_mat, b_mat), Clause::Lambda(arg, body)) => Some(
|
||||||
let mut state = any_match(arg_mat, arg)?;
|
any_match(arg_mat, arg, save_loc)?
|
||||||
state.extend(any_match(b_mat, body)?);
|
.combine(any_match(b_mat, body, save_loc)?),
|
||||||
Some(state)
|
),
|
||||||
},
|
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -30,13 +37,14 @@ pub fn scal_match<'a>(
|
|||||||
pub fn scalv_match<'a>(
|
pub fn scalv_match<'a>(
|
||||||
matchers: &[ScalMatcher],
|
matchers: &[ScalMatcher],
|
||||||
seq: &'a [RuleExpr],
|
seq: &'a [RuleExpr],
|
||||||
|
save_loc: &impl Fn(Sym) -> bool,
|
||||||
) -> Option<State<'a>> {
|
) -> Option<State<'a>> {
|
||||||
if seq.len() != matchers.len() {
|
if seq.len() != matchers.len() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let mut state = State::new();
|
let mut state = State::default();
|
||||||
for (matcher, expr) in matchers.iter().zip(seq.iter()) {
|
for (matcher, expr) in matchers.iter().zip(seq.iter()) {
|
||||||
state.extend(scal_match(matcher, expr)?);
|
state = state.combine(scal_match(matcher, expr, save_loc)?);
|
||||||
}
|
}
|
||||||
Some(state)
|
Some(state)
|
||||||
}
|
}
|
||||||
|
|||||||
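Two things change in `scal_match` above: a matched `Name` now records its location whenever `save_loc` approves it, and placeholders are split into a name-only variant and an ordinary scalar variant. A toy model of just the placeholder split, with a stand-in expression type rather than the crate's `RuleExpr` (everything here is illustrative, not the real API):

```rust
#[derive(Debug, Clone, PartialEq)]
enum Expr { Name(String), Lambda(String, Box<Expr>) }

fn scal_placeh_match(name_only: bool, expr: &Expr) -> Option<Expr> {
    match (name_only, expr) {
        // a name-only placeholder binds a bare name and nothing else
        (true, Expr::Name(_)) => Some(expr.clone()),
        (true, _) => None,
        // an ordinary scalar placeholder binds any single expression
        (false, _) => Some(expr.clone()),
    }
}

fn main() {
    let lambda = Expr::Lambda("x".into(), Box::new(Expr::Name("x".into())));
    assert_eq!(scal_placeh_match(true, &Expr::Name("y".into())), Some(Expr::Name("y".into())));
    assert_eq!(scal_placeh_match(true, &lambda), None);
    assert!(scal_placeh_match(false, &lambda).is_some());
}
```

In the stdlib sources the name-only form appears to be written with an underscore-prefixed placeholder such as `$_arg`, while `$x` remains the ordinary scalar form.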
@@ -5,6 +5,7 @@ use itertools::Itertools;
|
|||||||
|
|
||||||
use super::any_match::any_match;
|
use super::any_match::any_match;
|
||||||
use super::build::mk_any;
|
use super::build::mk_any;
|
||||||
|
use crate::ast::PType;
|
||||||
use crate::foreign::Atom;
|
use crate::foreign::Atom;
|
||||||
use crate::interner::Tok;
|
use crate::interner::Tok;
|
||||||
use crate::rule::matcher::{Matcher, RuleExpr};
|
use crate::rule::matcher::{Matcher, RuleExpr};
|
||||||
@@ -15,9 +16,9 @@ use crate::{Sym, VName};
|
|||||||
pub enum ScalMatcher {
|
pub enum ScalMatcher {
|
||||||
Atom(Atom),
|
Atom(Atom),
|
||||||
Name(Sym),
|
Name(Sym),
|
||||||
S(char, Box<AnyMatcher>),
|
S(PType, Box<AnyMatcher>),
|
||||||
Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
|
Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
|
||||||
Placeh(Tok<String>),
|
Placeh { key: Tok<String>, name_only: bool },
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum VecMatcher {
|
pub enum VecMatcher {
|
||||||
@@ -58,8 +59,12 @@ pub enum AnyMatcher {
|
|||||||
impl Matcher for AnyMatcher {
|
impl Matcher for AnyMatcher {
|
||||||
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self { mk_any(&pattern) }
|
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self { mk_any(&pattern) }
|
||||||
|
|
||||||
fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>> {
|
fn apply<'a>(
|
||||||
any_match(self, source)
|
&self,
|
||||||
|
source: &'a [RuleExpr],
|
||||||
|
save_loc: &impl Fn(Sym) -> bool,
|
||||||
|
) -> Option<State<'a>> {
|
||||||
|
any_match(self, source, save_loc)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -69,20 +74,13 @@ impl Display for ScalMatcher {
|
|||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Self::Atom(a) => write!(f, "{a:?}"),
|
Self::Atom(a) => write!(f, "{a:?}"),
|
||||||
Self::Placeh(n) => write!(f, "${n}"),
|
Self::Placeh { key, name_only } => match name_only {
|
||||||
|
false => write!(f, "${key}"),
|
||||||
|
true => write!(f, "$_{key}"),
|
||||||
|
},
|
||||||
Self::Name(n) => write!(f, "{}", n.extern_vec().join("::")),
|
Self::Name(n) => write!(f, "{}", n.extern_vec().join("::")),
|
||||||
Self::S(c, body) => {
|
Self::S(t, body) => write!(f, "{}{body}{}", t.l(), t.r()),
|
||||||
let pair = match c {
|
Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"),
|
||||||
'(' => ')',
|
|
||||||
'[' => ']',
|
|
||||||
'{' => '}',
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
write!(f, "{c}{body}{pair}")
|
|
||||||
},
|
|
||||||
Self::Lambda(arg, body) => {
|
|
||||||
write!(f, "\\{arg}.{body}")
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -136,8 +134,12 @@ pub struct VectreeMatcher(AnyMatcher);
|
|||||||
impl Matcher for VectreeMatcher {
|
impl Matcher for VectreeMatcher {
|
||||||
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self { Self(AnyMatcher::new(pattern)) }
|
fn new(pattern: Rc<Vec<RuleExpr>>) -> Self { Self(AnyMatcher::new(pattern)) }
|
||||||
|
|
||||||
fn apply<'a>(&self, source: &'a [RuleExpr]) -> Option<State<'a>> {
|
fn apply<'a>(
|
||||||
self.0.apply(source)
|
&self,
|
||||||
|
source: &'a [RuleExpr],
|
||||||
|
save_loc: &impl Fn(Sym) -> bool,
|
||||||
|
) -> Option<State<'a>> {
|
||||||
|
self.0.apply(source, save_loc)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Display for VectreeMatcher {
|
impl Display for VectreeMatcher {
|
||||||
|
|||||||
@@ -6,19 +6,20 @@ use super::scal_match::scalv_match;
|
|||||||
use super::shared::VecMatcher;
|
use super::shared::VecMatcher;
|
||||||
use crate::rule::matcher::RuleExpr;
|
use crate::rule::matcher::RuleExpr;
|
||||||
use crate::rule::state::{State, StateEntry};
|
use crate::rule::state::{State, StateEntry};
|
||||||
use crate::utils::unwrap_or;
|
use crate::Sym;
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn vec_match<'a>(
|
pub fn vec_match<'a>(
|
||||||
matcher: &VecMatcher,
|
matcher: &VecMatcher,
|
||||||
seq: &'a [RuleExpr],
|
seq: &'a [RuleExpr],
|
||||||
|
save_loc: &impl Fn(Sym) -> bool,
|
||||||
) -> Option<State<'a>> {
|
) -> Option<State<'a>> {
|
||||||
match matcher {
|
match matcher {
|
||||||
VecMatcher::Placeh { key, nonzero } => {
|
VecMatcher::Placeh { key, nonzero } => {
|
||||||
if *nonzero && seq.is_empty() {
|
if *nonzero && seq.is_empty() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
return Some(State::from([(key.clone(), StateEntry::Vec(seq))]));
|
return Some(State::from_ph(key.clone(), StateEntry::Vec(seq)));
|
||||||
},
|
},
|
||||||
VecMatcher::Scan { left, sep, right, direction } => {
|
VecMatcher::Scan { left, sep, right, direction } => {
|
||||||
if seq.len() < sep.len() {
|
if seq.len() < sep.len() {
|
||||||
@@ -26,10 +27,16 @@ pub fn vec_match<'a>(
|
|||||||
}
|
}
|
||||||
for lpos in direction.walk(0..=seq.len() - sep.len()) {
|
for lpos in direction.walk(0..=seq.len() - sep.len()) {
|
||||||
let rpos = lpos + sep.len();
|
let rpos = lpos + sep.len();
|
||||||
let mut state = unwrap_or!(vec_match(left, &seq[..lpos]); continue);
|
let state = vec_match(left, &seq[..lpos], save_loc)
|
||||||
state.extend(unwrap_or!(scalv_match(sep, &seq[lpos..rpos]); continue));
|
.and_then(|s| {
|
||||||
state.extend(unwrap_or!(vec_match(right, &seq[rpos..]); continue));
|
Some(s.combine(scalv_match(sep, &seq[lpos..rpos], save_loc)?))
|
||||||
return Some(state);
|
})
|
||||||
|
.and_then(|s| {
|
||||||
|
Some(s.combine(vec_match(right, &seq[rpos..], save_loc)?))
|
||||||
|
});
|
||||||
|
if let Some(s) = state {
|
||||||
|
return Some(s);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
None
|
None
|
||||||
},
|
},
|
||||||
@@ -42,14 +49,16 @@ pub fn vec_match<'a>(
|
|||||||
let lposv = seq[..seq.len() - right_sep.len()]
|
let lposv = seq[..seq.len() - right_sep.len()]
|
||||||
.windows(left_sep.len())
|
.windows(left_sep.len())
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.filter_map(|(i, window)| scalv_match(left_sep, window).map(|s| (i, s)))
|
.filter_map(|(i, window)| {
|
||||||
|
scalv_match(left_sep, window, save_loc).map(|s| (i, s))
|
||||||
|
})
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
// Valid locations for the right separator
|
// Valid locations for the right separator
|
||||||
let rposv = seq[left_sep.len()..]
|
let rposv = seq[left_sep.len()..]
|
||||||
.windows(right_sep.len())
|
.windows(right_sep.len())
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.filter_map(|(i, window)| {
|
.filter_map(|(i, window)| {
|
||||||
scalv_match(right_sep, window).map(|s| (i, s))
|
scalv_match(right_sep, window, save_loc).map(|s| (i, s))
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
// Valid combinations of locations for the separators
|
// Valid combinations of locations for the separators
|
||||||
@@ -57,9 +66,8 @@ pub fn vec_match<'a>(
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.cartesian_product(rposv)
|
.cartesian_product(rposv)
|
||||||
.filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
|
.filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
|
||||||
.map(|((lpos, mut lstate), (rpos, rstate))| {
|
.map(|((lpos, lstate), (rpos, rstate))| {
|
||||||
lstate.extend(rstate);
|
(lpos, rpos, lstate.combine(rstate))
|
||||||
(lpos, rpos, lstate)
|
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
// In descending order of size
|
// In descending order of size
|
||||||
@@ -68,25 +76,29 @@ pub fn vec_match<'a>(
|
|||||||
for (_gap_size, cluster) in eql_clusters.into_iter() {
|
for (_gap_size, cluster) in eql_clusters.into_iter() {
|
||||||
let best_candidate = cluster
|
let best_candidate = cluster
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|(lpos, rpos, mut state)| {
|
.filter_map(|(lpos, rpos, state)| {
|
||||||
state.extend(vec_match(left, &seq[..lpos])?);
|
Some(
|
||||||
state.extend(vec_match(mid, &seq[lpos + left_sep.len()..rpos])?);
|
state
|
||||||
state.extend(vec_match(right, &seq[rpos + right_sep.len()..])?);
|
.combine(vec_match(left, &seq[..lpos], save_loc)?)
|
||||||
Some(state)
|
.combine(vec_match(
|
||||||
|
mid,
|
||||||
|
&seq[lpos + left_sep.len()..rpos],
|
||||||
|
save_loc,
|
||||||
|
)?)
|
||||||
|
.combine(vec_match(
|
||||||
|
right,
|
||||||
|
&seq[rpos + right_sep.len()..],
|
||||||
|
save_loc,
|
||||||
|
)?),
|
||||||
|
)
|
||||||
})
|
})
|
||||||
.max_by(|a, b| {
|
.max_by(|a, b| {
|
||||||
for key in key_order {
|
for key in key_order {
|
||||||
let aslc = if let Some(StateEntry::Vec(s)) = a.get(key) {
|
let alen =
|
||||||
s
|
a.ph_len(key).expect("key_order references scalar or missing");
|
||||||
} else {
|
let blen =
|
||||||
panic!("key_order references scalar or missing")
|
b.ph_len(key).expect("key_order references scalar or missing");
|
||||||
};
|
match alen.cmp(&blen) {
|
||||||
let bslc = if let Some(StateEntry::Vec(s)) = b.get(key) {
|
|
||||||
s
|
|
||||||
} else {
|
|
||||||
panic!("key_order references scalar or missing")
|
|
||||||
};
|
|
||||||
match aslc.len().cmp(&bslc.len()) {
|
|
||||||
Ordering::Equal => (),
|
Ordering::Equal => (),
|
||||||
any => return any,
|
any => return any,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -43,6 +43,7 @@ fn pad(mut rule: Rule<Sym>, i: &Interner) -> Rule<Sym> {
|
|||||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||||
enum PHType {
|
enum PHType {
|
||||||
Scalar,
|
Scalar,
|
||||||
|
Name,
|
||||||
Vec { nonzero: bool },
|
Vec { nonzero: bool },
|
||||||
}
|
}
|
||||||
impl From<PHClass> for PHType {
|
impl From<PHClass> for PHType {
|
||||||
@@ -50,6 +51,7 @@ impl From<PHClass> for PHType {
|
|||||||
match value {
|
match value {
|
||||||
PHClass::Scalar => Self::Scalar,
|
PHClass::Scalar => Self::Scalar,
|
||||||
PHClass::Vec { nonzero, .. } => Self::Vec { nonzero },
|
PHClass::Vec { nonzero, .. } => Self::Vec { nonzero },
|
||||||
|
PHClass::Name => Self::Name,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,13 +18,20 @@ use crate::Sym;
|
|||||||
pub struct CachedRule<M: Matcher> {
|
pub struct CachedRule<M: Matcher> {
|
||||||
matcher: M,
|
matcher: M,
|
||||||
pattern: Vec<RuleExpr>,
|
pattern: Vec<RuleExpr>,
|
||||||
|
pat_glossary: HashSet<Sym>,
|
||||||
template: Vec<RuleExpr>,
|
template: Vec<RuleExpr>,
|
||||||
|
save_location: HashSet<Sym>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<M: Display + Matcher> Display for CachedRule<M> {
|
impl<M: Display + Matcher> Display for CachedRule<M> {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
let patterns = self.pattern.iter().join(" ");
|
let patterns = self.pattern.iter().join(" ");
|
||||||
write!(f, "{patterns} is matched by {}", self.matcher)
|
write!(
|
||||||
|
f,
|
||||||
|
"{patterns} is matched by {} and generates {}",
|
||||||
|
self.matcher,
|
||||||
|
self.template.iter().map(|e| e.to_string()).join(" ")
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -36,7 +43,7 @@ impl<M: Display + Matcher> Display for CachedRule<M> {
|
|||||||
///
|
///
|
||||||
/// If you don't know what to put in the generic parameter, use [Repo]
|
/// If you don't know what to put in the generic parameter, use [Repo]
|
||||||
pub struct Repository<M: Matcher> {
|
pub struct Repository<M: Matcher> {
|
||||||
cache: Vec<(CachedRule<M>, HashSet<Sym>, NotNan<f64>)>,
|
cache: Vec<(CachedRule<M>, NotNan<f64>)>,
|
||||||
}
|
}
|
||||||
impl<M: Matcher> Repository<M> {
|
impl<M: Matcher> Repository<M> {
|
||||||
/// Build a new repository to hold the given set of rules
|
/// Build a new repository to hold the given set of rules
|
||||||
@@ -48,19 +55,27 @@ impl<M: Matcher> Repository<M> {
|
|||||||
let cache = rules
|
let cache = rules
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|r| {
|
.map(|r| {
|
||||||
let prio = r.prio;
|
let Rule { pattern, prio, template } =
|
||||||
let rule = prepare_rule(r.clone(), i).map_err(|e| (r, e))?;
|
prepare_rule(r.clone(), i).map_err(|e| (r, e))?;
|
||||||
let mut glossary = HashSet::new();
|
let mut pat_glossary = HashSet::new();
|
||||||
for e in rule.pattern.iter() {
|
pat_glossary.extend(
|
||||||
glossary.extend(e.value.collect_names().into_iter());
|
pattern.iter().flat_map(|e| e.value.collect_names().into_iter()),
|
||||||
}
|
);
|
||||||
let matcher = M::new(Rc::new(rule.pattern.clone()));
|
let mut tpl_glossary = HashSet::new();
|
||||||
|
tpl_glossary.extend(
|
||||||
|
template.iter().flat_map(|e| e.value.collect_names().into_iter()),
|
||||||
|
);
|
||||||
|
let save_location =
|
||||||
|
pat_glossary.intersection(&tpl_glossary).cloned().collect();
|
||||||
|
let matcher = M::new(Rc::new(pattern.clone()));
|
||||||
let prep = CachedRule {
|
let prep = CachedRule {
|
||||||
matcher,
|
matcher,
|
||||||
pattern: rule.pattern,
|
pattern,
|
||||||
template: rule.template,
|
template,
|
||||||
|
pat_glossary,
|
||||||
|
save_location,
|
||||||
};
|
};
|
||||||
Ok((prep, glossary, prio))
|
Ok((prep, prio))
|
||||||
})
|
})
|
||||||
.collect::<Result<Vec<_>, _>>()?;
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
Ok(Self { cache })
|
Ok(Self { cache })
|
||||||
@@ -70,12 +85,13 @@ impl<M: Matcher> Repository<M> {
|
|||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn step(&self, code: &RuleExpr) -> Option<RuleExpr> {
|
pub fn step(&self, code: &RuleExpr) -> Option<RuleExpr> {
|
||||||
let glossary = code.value.collect_names();
|
let glossary = code.value.collect_names();
|
||||||
for (rule, deps, _) in self.cache.iter() {
|
for (rule, _) in self.cache.iter() {
|
||||||
if !deps.is_subset(&glossary) {
|
if !rule.pat_glossary.is_subset(&glossary) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
let product = update_first_seq::expr(code, &mut |exprv| {
|
let product = update_first_seq::expr(code, &mut |exprv| {
|
||||||
let state = rule.matcher.apply(exprv.as_slice())?;
|
let save_loc = |n| rule.save_location.contains(&n);
|
||||||
|
let state = rule.matcher.apply(exprv.as_slice(), &save_loc)?;
|
||||||
let result = apply_exprv(&rule.template, &state);
|
let result = apply_exprv(&rule.template, &state);
|
||||||
Some(Rc::new(result))
|
Some(Rc::new(result))
|
||||||
});
|
});
|
||||||
@@ -142,9 +158,10 @@ impl<M: Debug + Matcher> Debug for Repository<M> {
|
|||||||
impl<M: Display + Matcher> Display for Repository<M> {
|
impl<M: Display + Matcher> Display for Repository<M> {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
writeln!(f, "Repository[")?;
|
writeln!(f, "Repository[")?;
|
||||||
for (rule, deps, p) in self.cache.iter() {
|
for (rule, p) in self.cache.iter() {
|
||||||
let prio = print_nat16(*p);
|
let prio = print_nat16(*p);
|
||||||
let deps = deps.iter().map(|t| t.extern_vec().join("::")).join(", ");
|
let deps =
|
||||||
|
rule.pat_glossary.iter().map(|t| t.extern_vec().join("::")).join(", ");
|
||||||
writeln!(f, " priority: {prio}\tdependencies: [{deps}]")?;
|
writeln!(f, " priority: {prio}\tdependencies: [{deps}]")?;
|
||||||
writeln!(f, " {rule}")?;
|
writeln!(f, " {rule}")?;
|
||||||
}
|
}
|
||||||
|
|||||||
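The cached rule now keeps two name sets: `pat_glossary` (every name the pattern mentions, used to skip rules that cannot possibly match) and `save_location` (names occurring in both the pattern and the template, whose source locations should survive substitution). A small self-contained sketch of how that set is derived and then handed to the matcher as a predicate, with plain strings standing in for interned `Sym` values:

```rust
use std::collections::HashSet;

// Names are plain `String`s here; the real code works on interned `Sym` values.
fn save_location_set(pattern_names: &[&str], template_names: &[&str]) -> HashSet<String> {
    let pat: HashSet<String> = pattern_names.iter().map(|s| s.to_string()).collect();
    let tpl: HashSet<String> = template_names.iter().map(|s| s.to_string()).collect();
    // only names that survive into the template need their locations preserved
    pat.intersection(&tpl).cloned().collect()
}

fn main() {
    let save_location = save_location_set(&["if", "then", "else"], &["ifthenelse"]);
    assert!(save_location.is_empty());
    let save_location = save_location_set(&["loop", "do"], &["loop", "recurse"]);
    // the matcher receives this set as the `save_loc` closure
    let save_loc = |n: &str| save_location.contains(n);
    assert!(save_loc("loop"));
    assert!(!save_loc("do"));
}
```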
@@ -180,6 +180,7 @@ impl ProjectError for ArityMismatch {
|
|||||||
"This instance represents ".to_string()
|
"This instance represents ".to_string()
|
||||||
+ match class {
|
+ match class {
|
||||||
ast::PHClass::Scalar => "one clause",
|
ast::PHClass::Scalar => "one clause",
|
||||||
|
ast::PHClass::Name => "one name",
|
||||||
ast::PHClass::Vec { nonzero: true, .. } => "one or more clauses",
|
ast::PHClass::Vec { nonzero: true, .. } => "one or more clauses",
|
||||||
ast::PHClass::Vec { nonzero: false, .. } =>
|
ast::PHClass::Vec { nonzero: false, .. } =>
|
||||||
"any number of clauses",
|
"any number of clauses",
|
||||||
|
|||||||
@@ -1,18 +1,72 @@
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
|
use itertools::{EitherOrBoth, Itertools};
|
||||||
|
|
||||||
use super::matcher::RuleExpr;
|
use super::matcher::RuleExpr;
|
||||||
use crate::ast::{Clause, Expr, PHClass, Placeholder};
|
use crate::ast::{Clause, Expr, PHClass, Placeholder};
|
||||||
use crate::interner::Tok;
|
use crate::interner::Tok;
|
||||||
use crate::utils::unwrap_or;
|
use crate::utils::unwrap_or;
|
||||||
|
use crate::{Location, Sym};
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug)]
|
#[derive(Clone, Copy, Debug)]
|
||||||
pub enum StateEntry<'a> {
|
pub enum StateEntry<'a> {
|
||||||
Vec(&'a [RuleExpr]),
|
Vec(&'a [RuleExpr]),
|
||||||
Scalar(&'a RuleExpr),
|
Scalar(&'a RuleExpr),
|
||||||
|
Name(&'a Sym, &'a Location),
|
||||||
|
}
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct State<'a> {
|
||||||
|
placeholders: HashMap<Tok<String>, StateEntry<'a>>,
|
||||||
|
name_locations: HashMap<Sym, Vec<Location>>,
|
||||||
|
}
|
||||||
|
impl<'a> State<'a> {
|
||||||
|
pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self {
|
||||||
|
Self {
|
||||||
|
placeholders: HashMap::from([(key, entry)]),
|
||||||
|
name_locations: HashMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn combine(self, s: Self) -> Self {
|
||||||
|
Self {
|
||||||
|
placeholders: self
|
||||||
|
.placeholders
|
||||||
|
.into_iter()
|
||||||
|
.chain(s.placeholders)
|
||||||
|
.collect(),
|
||||||
|
name_locations: (self.name_locations.into_iter())
|
||||||
|
.sorted_unstable_by_key(|(k, _)| k.id())
|
||||||
|
.merge_join_by(
|
||||||
|
(s.name_locations.into_iter())
|
||||||
|
.sorted_unstable_by_key(|(k, _)| k.id()),
|
||||||
|
|(k, _), (k2, _)| k.id().cmp(&k2.id()),
|
||||||
|
)
|
||||||
|
.map(|ent| match ent {
|
||||||
|
EitherOrBoth::Left(i) | EitherOrBoth::Right(i) => i,
|
||||||
|
EitherOrBoth::Both((k, l), (_, r)) =>
|
||||||
|
(k, l.into_iter().chain(r).collect()),
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> {
|
||||||
|
match self.placeholders.get(key)? {
|
||||||
|
StateEntry::Vec(slc) => Some(slc.len()),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn from_name(name: Sym, location: Location) -> Self {
|
||||||
|
Self {
|
||||||
|
name_locations: HashMap::from([(name, vec![location])]),
|
||||||
|
placeholders: HashMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Default for State<'static> {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self { name_locations: HashMap::new(), placeholders: HashMap::new() }
|
||||||
|
}
|
||||||
}
|
}
|
||||||
pub type State<'a> = HashMap<Tok<String>, StateEntry<'a>>;
|
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec<RuleExpr> {
|
pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec<RuleExpr> {
|
||||||
@@ -35,12 +89,15 @@ pub fn apply_expr(template: &RuleExpr, state: &State) -> Vec<RuleExpr> {
|
|||||||
value: Clause::S(*c, Rc::new(apply_exprv(body.as_slice(), state))),
|
value: Clause::S(*c, Rc::new(apply_exprv(body.as_slice(), state))),
|
||||||
}],
|
}],
|
||||||
Clause::Placeh(Placeholder { name, class }) => {
|
Clause::Placeh(Placeholder { name, class }) => {
|
||||||
let value = *unwrap_or!(state.get(name);
|
let value = *unwrap_or!(state.placeholders.get(name);
|
||||||
panic!("Placeholder does not have a value in state")
|
panic!("Placeholder does not have a value in state")
|
||||||
);
|
);
|
||||||
match (class, value) {
|
match (class, value) {
|
||||||
(PHClass::Scalar, StateEntry::Scalar(item)) => vec![item.clone()],
|
(PHClass::Scalar, StateEntry::Scalar(item)) => vec![item.clone()],
|
||||||
(PHClass::Vec { .. }, StateEntry::Vec(chunk)) => chunk.to_vec(),
|
(PHClass::Vec { .. }, StateEntry::Vec(chunk)) => chunk.to_vec(),
|
||||||
|
(PHClass::Name, StateEntry::Name(n, l)) => {
|
||||||
|
vec![RuleExpr { value: Clause::Name(n.clone()), location: l.clone() }]
|
||||||
|
},
|
||||||
_ => panic!("Type mismatch between template and state"),
|
_ => panic!("Type mismatch between template and state"),
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
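`State` grows from a plain placeholder map into a struct that also tracks every location at which a saved name matched, and `combine` merges two such states. A minimal sketch of just the location-merging behaviour; the real code merges with `merge_join_by` over interned keys, while plain `String` keys and `usize` "locations" are used here for illustration:

```rust
use std::collections::HashMap;

// Location lists recorded under the same name are concatenated on merge.
fn combine(
    mut a: HashMap<String, Vec<usize>>,
    b: HashMap<String, Vec<usize>>,
) -> HashMap<String, Vec<usize>> {
    for (name, locs) in b {
        a.entry(name).or_default().extend(locs);
    }
    a
}

fn main() {
    let left = HashMap::from([("std::if".to_string(), vec![3])]);
    let right = HashMap::from([
        ("std::if".to_string(), vec![7]),
        ("std::then".to_string(), vec![9]),
    ]);
    let merged = combine(left, right);
    assert_eq!(merged["std::if"], vec![3, 7]);
    assert_eq!(merged["std::then"], vec![9]);
}
```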
@@ -55,6 +55,7 @@ impl InertAtomic for Yield {
|
|||||||
|
|
||||||
/// Error indicating a yield command when all event producers and timers had
|
/// Error indicating a yield command when all event producers and timers had
|
||||||
/// exited
|
/// exited
|
||||||
|
#[derive(Clone)]
|
||||||
pub struct InfiniteBlock;
|
pub struct InfiniteBlock;
|
||||||
impl ExternError for InfiniteBlock {}
|
impl ExternError for InfiniteBlock {}
|
||||||
impl Display for InfiniteBlock {
|
impl Display for InfiniteBlock {
|
||||||
@@ -187,8 +188,8 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
|
|||||||
});
|
});
|
||||||
System {
|
System {
|
||||||
name: vec!["system".to_string(), "asynch".to_string()],
|
name: vec!["system".to_string(), "asynch".to_string()],
|
||||||
lexer_plugin: None,
|
lexer_plugins: vec![],
|
||||||
line_parser: None,
|
line_parsers: vec![],
|
||||||
constants: ConstTree::namespace(
|
constants: ConstTree::namespace(
|
||||||
[i.i("system"), i.i("async")],
|
[i.i("system"), i.i("async")],
|
||||||
ConstTree::tree([
|
ConstTree::tree([
|
||||||
|
|||||||
@@ -183,8 +183,8 @@ impl IntoSystem<'static> for DirectFS {
|
|||||||
name: ["system", "directfs"].into_iter().map_into().collect(),
|
name: ["system", "directfs"].into_iter().map_into().collect(),
|
||||||
code: HashMap::new(),
|
code: HashMap::new(),
|
||||||
prelude: Vec::new(),
|
prelude: Vec::new(),
|
||||||
lexer_plugin: None,
|
lexer_plugins: vec![],
|
||||||
line_parser: None,
|
line_parsers: vec![],
|
||||||
constants: ConstTree::namespace(
|
constants: ConstTree::namespace(
|
||||||
[i.i("system"), i.i("fs")],
|
[i.i("system"), i.i("fs")],
|
||||||
ConstTree::tree([
|
ConstTree::tree([
|
||||||
|
|||||||
@@ -113,8 +113,8 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
|
|||||||
name: None,
|
name: None,
|
||||||
}]),
|
}]),
|
||||||
}],
|
}],
|
||||||
lexer_plugin: None,
|
lexer_plugins: vec![],
|
||||||
line_parser: None,
|
line_parsers: vec![],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,3 +5,5 @@ pub mod directfs;
|
|||||||
pub mod io;
|
pub mod io;
|
||||||
pub mod scheduler;
|
pub mod scheduler;
|
||||||
pub mod stl;
|
pub mod stl;
|
||||||
|
pub mod parse_custom_line;
|
||||||
|
|
||||||
|
|||||||
40
src/systems/parse_custom_line.rs
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
//! A helper for defining custom lines. See [custom_line]
|
||||||
|
use crate::error::{ProjectError, ProjectResult};
|
||||||
|
use crate::parse::errors::{Expected, ExpectedName};
|
||||||
|
use crate::parse::{Entry, Lexeme, Stream};
|
||||||
|
use crate::{Location, Tok};
|
||||||
|
|
||||||
|
/// An exported line with a name for which the line parser denies exports
|
||||||
|
pub struct Unexportable(Entry);
|
||||||
|
impl ProjectError for Unexportable {
|
||||||
|
fn description(&self) -> &str { "this line type cannot be exported" }
|
||||||
|
fn message(&self) -> String { format!("{} cannot be exported", &self.0) }
|
||||||
|
fn one_position(&self) -> Location { self.0.location() }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a line identified by the specified leading keyword. Although not
|
||||||
|
/// required, plugins are encouraged to prefix their lines with a globally
|
||||||
|
/// unique keyword which makes or breaks their parsing, to avoid accidental
|
||||||
|
/// failure to recognize
|
||||||
|
pub fn custom_line(
|
||||||
|
tail: Stream<'_>,
|
||||||
|
keyword: Tok<String>,
|
||||||
|
exportable: bool,
|
||||||
|
) -> Option<ProjectResult<(bool, Stream<'_>, Location)>> {
|
||||||
|
let line_loc = tail.location();
|
||||||
|
let (fst, tail) = tail.pop().ok()?;
|
||||||
|
let fst_name = ExpectedName::expect(fst).ok()?;
|
||||||
|
let (exported, n_ent, tail) = if fst_name == keyword {
|
||||||
|
(false, fst, tail.trim())
|
||||||
|
} else if fst_name.as_str() == "export" {
|
||||||
|
let (snd, tail) = tail.pop().ok()?;
|
||||||
|
Expected::expect(Lexeme::Name(keyword), snd).ok()?;
|
||||||
|
(true, snd, tail.trim())
|
||||||
|
} else {
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
Some(match exported && !exportable {
|
||||||
|
true => Err(Unexportable(n_ent.clone()).rc()),
|
||||||
|
false => Ok((exported, tail, line_loc)),
|
||||||
|
})
|
||||||
|
}
|
||||||
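The new `custom_line` helper recognises lines of the form `keyword ...` or `export keyword ...` and rejects the exported form when the caller marks the line type unexportable. The sketch below mirrors that control flow on a plain token list; the tokens, keyword and error text are made up for illustration, and the real helper operates on the parser's `Stream` and error types:

```rust
fn recognize<'a>(
    tokens: &'a [&'a str],
    keyword: &str,
    exportable: bool,
) -> Option<Result<(bool, &'a [&'a str]), String>> {
    let (first, rest) = tokens.split_first()?;
    let (exported, tail) = if *first == keyword {
        (false, rest)
    } else if *first == "export" {
        let (second, rest2) = rest.split_first()?;
        if *second != keyword {
            return None;
        }
        (true, rest2)
    } else {
        return None;
    };
    Some(match exported && !exportable {
        true => Err(format!("{keyword} lines cannot be exported")),
        false => Ok((exported, tail)),
    })
}

fn main() {
    // a hypothetical `operators` line defined by some plugin
    assert!(matches!(
        recognize(&["operators", "+", "-"], "operators", true),
        Some(Ok((false, _)))
    ));
    // exporting is refused when the line type is not exportable
    assert!(matches!(
        recognize(&["export", "operators", "+"], "operators", false),
        Some(Err(_))
    ));
    // unrelated lines are left for other line parsers
    assert!(recognize(&["const", "x"], "operators", true).is_none());
}
```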
@@ -332,8 +332,8 @@ impl IntoSystem<'static> for SeqScheduler {
|
|||||||
prelude: Vec::new(),
|
prelude: Vec::new(),
|
||||||
code: HashMap::new(),
|
code: HashMap::new(),
|
||||||
handlers,
|
handlers,
|
||||||
lexer_plugin: None,
|
lexer_plugins: vec![],
|
||||||
line_parser: None,
|
line_parsers: vec![],
|
||||||
constants: ConstTree::namespace(
|
constants: ConstTree::namespace(
|
||||||
[i.i("system"), i.i("scheduler")],
|
[i.i("system"), i.i("scheduler")],
|
||||||
ConstTree::tree([
|
ConstTree::tree([
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ use std::fmt::Display;
|
|||||||
use crate::foreign::ExternError;
|
use crate::foreign::ExternError;
|
||||||
|
|
||||||
/// Various errors produced by arithmetic operations
|
/// Various errors produced by arithmetic operations
|
||||||
|
#[derive(Clone)]
|
||||||
pub enum ArithmeticError {
|
pub enum ArithmeticError {
|
||||||
/// Integer overflow
|
/// Integer overflow
|
||||||
Overflow,
|
Overflow,
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
import std::match
|
||||||
|
|
||||||
export ::(!=, ==)
|
export ::(!=, ==)
|
||||||
|
|
||||||
export const not := \bool. if bool then false else true
|
export const not := \bool. if bool then false else true
|
||||||
@@ -8,3 +10,37 @@ export macro ...$a or ...$b =0x4p36=> (ifthenelse (...$a) true (...$b))
|
|||||||
export macro if ...$cond then ...$true else ...$false:1 =0x1p84=> (
|
export macro if ...$cond then ...$true else ...$false:1 =0x1p84=> (
|
||||||
ifthenelse (...$cond) (...$true) (...$false)
|
ifthenelse (...$cond) (...$true) (...$false)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
macro match::request (== ...$other)
|
||||||
|
=0x1p230=> match::response (
|
||||||
|
if match::value == (...$other)
|
||||||
|
then match::pass
|
||||||
|
else match::fail
|
||||||
|
)
|
||||||
|
( match::no_binds )
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
macro match::request (!= ...$other)
|
||||||
|
=0x1p230=> match::response (
|
||||||
|
if match::value != (...$other)
|
||||||
|
then match::pass
|
||||||
|
else match::fail
|
||||||
|
)
|
||||||
|
( match::no_binds )
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
macro match::request (true)
|
||||||
|
=0x1p230=> match::response
|
||||||
|
(if match::value then match::pass else match::fail)
|
||||||
|
( match::no_binds )
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
macro match::request (false)
|
||||||
|
=0x1p230=> match::response
|
||||||
|
(if match::value then match::fail else match::pass)
|
||||||
|
( match::no_binds )
|
||||||
|
)
|
||||||
|
|||||||
112
src/systems/stl/cross_pipeline.rs
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
use std::collections::VecDeque;
|
||||||
|
use std::fmt::Debug;
|
||||||
|
use std::iter;
|
||||||
|
use std::ops::Deref;
|
||||||
|
use std::rc::Rc;
|
||||||
|
use std::sync::{Arc, Mutex};
|
||||||
|
|
||||||
|
use crate::ast::{self, PType};
|
||||||
|
use crate::ddispatch::Responder;
|
||||||
|
use crate::foreign::{
|
||||||
|
xfn_1ary, Atomic, AtomicReturn, ExFn, StrictEq, ToClause, XfnResult,
|
||||||
|
};
|
||||||
|
use crate::interpreted::{self, TryFromExprInst};
|
||||||
|
use crate::utils::pure_seq::pushed;
|
||||||
|
use crate::{interpreter, VName};
|
||||||
|
|
||||||
|
pub trait DeferredRuntimeCallback<T, U, R: ToClause>:
|
||||||
|
Fn(Vec<(T, U)>) -> XfnResult<R> + Clone + Send + 'static
|
||||||
|
{
|
||||||
|
}
|
||||||
|
impl<
|
||||||
|
T,
|
||||||
|
U,
|
||||||
|
R: ToClause,
|
||||||
|
F: Fn(Vec<(T, U)>) -> XfnResult<R> + Clone + Send + 'static,
|
||||||
|
> DeferredRuntimeCallback<T, U, R> for F
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
fn table_receiver_rec<
|
||||||
|
T: Clone + Send + 'static,
|
||||||
|
U: TryFromExprInst + Clone + Send + 'static,
|
||||||
|
R: ToClause + 'static,
|
||||||
|
>(
|
||||||
|
results: Vec<(T, U)>,
|
||||||
|
mut remaining_keys: VecDeque<T>,
|
||||||
|
callback: impl DeferredRuntimeCallback<T, U, R>,
|
||||||
|
) -> XfnResult<interpreted::Clause> {
|
||||||
|
match remaining_keys.pop_front() {
|
||||||
|
None => callback(results).map(|v| v.to_clause()),
|
||||||
|
Some(t) => Ok(interpreted::Clause::ExternFn(ExFn(Box::new(xfn_1ary(
|
||||||
|
move |u: U| {
|
||||||
|
table_receiver_rec(pushed(results, (t, u)), remaining_keys, callback)
|
||||||
|
},
|
||||||
|
))))),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct EphemeralAtom(
|
||||||
|
Arc<dyn Fn() -> XfnResult<interpreted::Clause> + Sync + Send>,
|
||||||
|
);
|
||||||
|
impl Debug for EphemeralAtom {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.write_str("EphemeralAtom")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Responder for EphemeralAtom {
|
||||||
|
fn respond(&self, _request: crate::ddispatch::Request) {}
|
||||||
|
}
|
||||||
|
impl StrictEq for EphemeralAtom {
|
||||||
|
fn strict_eq(&self, _: &dyn std::any::Any) -> bool { false }
|
||||||
|
}
|
||||||
|
impl Atomic for EphemeralAtom {
|
||||||
|
fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
|
||||||
|
fn as_any_ref(&self) -> &dyn std::any::Any { self }
|
||||||
|
fn run(
|
||||||
|
self: Box<Self>,
|
||||||
|
ctx: interpreter::Context,
|
||||||
|
) -> crate::foreign::AtomicResult {
|
||||||
|
Ok(AtomicReturn { clause: (self.0)()?, gas: ctx.gas, inert: false })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn table_receiver<
|
||||||
|
T: Clone + Send + 'static,
|
||||||
|
U: TryFromExprInst + Clone + Send + 'static,
|
||||||
|
R: ToClause + 'static,
|
||||||
|
>(
|
||||||
|
keys: VecDeque<T>,
|
||||||
|
callback: impl DeferredRuntimeCallback<T, U, R>,
|
||||||
|
) -> ast::Clause<VName> {
|
||||||
|
if keys.is_empty() {
|
||||||
|
let result =
|
||||||
|
Arc::new(Mutex::new(callback(Vec::new()).map(|v| v.to_clause())));
|
||||||
|
EphemeralAtom(Arc::new(move || result.lock().unwrap().deref().clone()))
|
||||||
|
.ast_cls()
|
||||||
|
} else {
|
||||||
|
match table_receiver_rec(Vec::new(), keys, callback) {
|
||||||
|
Ok(interpreted::Clause::ExternFn(xfn)) => ast::Clause::ExternFn(xfn),
|
||||||
|
_ => unreachable!("details"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn defer_to_runtime<
|
||||||
|
T: Clone + Send + 'static,
|
||||||
|
U: TryFromExprInst + Clone + Send + 'static,
|
||||||
|
R: ToClause + 'static,
|
||||||
|
>(
|
||||||
|
pairs: impl IntoIterator<Item = (T, Vec<ast::Expr<VName>>)>,
|
||||||
|
callback: impl DeferredRuntimeCallback<T, U, R>,
|
||||||
|
) -> ast::Clause<VName> {
|
||||||
|
let (keys, ast_values) =
|
||||||
|
pairs.into_iter().unzip::<_, _, VecDeque<_>, Vec<_>>();
|
||||||
|
ast::Clause::s(
|
||||||
|
'(',
|
||||||
|
iter::once(table_receiver(keys, callback)).chain(
|
||||||
|
ast_values.into_iter().map(|v| ast::Clause::S(PType::Par, Rc::new(v))),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
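`defer_to_runtime` builds an AST application chain in which each Orchid sub-expression is evaluated by the interpreter and fed, one at a time, to a chain of extern functions; only once every pending key has received its value does the callback run. A much-simplified model of that accumulation step, where `receive_rec` plays the role of `table_receiver_rec` and the plain `values` queue stands in for values the interpreter would supply at runtime:

```rust
use std::collections::VecDeque;

fn receive_rec<T, U>(
    mut results: Vec<(T, U)>,
    mut remaining_keys: VecDeque<T>,
    values: &mut VecDeque<U>,
    callback: &impl Fn(Vec<(T, U)>) -> String,
) -> String {
    match remaining_keys.pop_front() {
        // all keys consumed: hand the accumulated pairs to the callback
        None => callback(results),
        Some(key) => {
            // in the real code this step is an ExternFn the interpreter applies
            let value = values.pop_front().expect("one value per key");
            results.push((key, value));
            receive_rec(results, remaining_keys, values, callback)
        },
    }
}

fn main() {
    let keys: VecDeque<&str> = VecDeque::from(["greeting", "age"]);
    let mut values: VecDeque<i64> = VecDeque::from([1, 22]);
    let out = receive_rec(Vec::new(), keys, &mut values, &|pairs| format!("{pairs:?}"));
    assert_eq!(out, r#"[("greeting", 1), ("age", 22)]"#);
}
```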
@@ -1,4 +1,7 @@
|
|||||||
import super::known::*
|
import super::known::*
|
||||||
|
import super::match::*
|
||||||
|
import super::macro
|
||||||
|
import super::match::(match, =>)
|
||||||
|
|
||||||
--[ Do nothing. Especially useful as a passive cps operation ]--
|
--[ Do nothing. Especially useful as a passive cps operation ]--
|
||||||
export const identity := \x.x
|
export const identity := \x.x
|
||||||
@@ -21,6 +24,19 @@ export const return := \a. \b.a
|
|||||||
export macro ...$prefix $ ...$suffix:1 =0x1p38=> ...$prefix (...$suffix)
|
export macro ...$prefix $ ...$suffix:1 =0x1p38=> ...$prefix (...$suffix)
|
||||||
export macro ...$prefix |> $fn ..$suffix:1 =0x2p32=> $fn (...$prefix) ..$suffix
|
export macro ...$prefix |> $fn ..$suffix:1 =0x2p32=> $fn (...$prefix) ..$suffix
|
||||||
|
|
||||||
export macro ($name) => ...$body =0x2p127=> (\$name. ...$body)
|
( macro (..$argv) => ...$body
|
||||||
export macro ($name, ...$argv) => ...$body =0x2p127=> (\$name. (...$argv) => ...$body)
|
=0x2p127=> lambda_walker macro::comma_list (..$argv) (...$body)
|
||||||
export macro $name => ...$body =0x1p127=> (\$name. ...$body)
|
)
|
||||||
|
( macro $_arg => ...$body
|
||||||
|
=0x2p127=> \$_arg. ...$body)
|
||||||
|
( macro lambda_walker ( macro::list_item ($_argname) $tail ) $body
|
||||||
|
=0x2p254=> \$_argname. lambda_walker $tail $body
|
||||||
|
)
|
||||||
|
( macro lambda_walker ( macro::list_item (...$head) $tail ) $body
|
||||||
|
=0x1p254=> \arg. match arg {
|
||||||
|
...$head => lambda_walker $tail $body;
|
||||||
|
}
|
||||||
|
)
|
||||||
|
( macro lambda_walker macro::list_end $body
|
||||||
|
=0x1p254=> $body
|
||||||
|
)
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
export ::[,]
|
export ::[, _ ; . =]
|
||||||
|
|||||||
@@ -1,18 +1,26 @@
|
|||||||
import super::option
|
import super::(option, match, macro)
|
||||||
import super::(functional::*, procedural::*, loop::*, bool::*, known::*, number::*, tuple::*)
|
import super::(functional::*, procedural::*)
|
||||||
|
import super::(loop::*, bool::*, known::*, number::*, tuple::*)
|
||||||
|
|
||||||
const pair := \a. \b. \f. f a b
|
export type ty (
|
||||||
|
import super::super::(option, tuple, panic)
|
||||||
|
import super::super::(known::*, bool::*)
|
||||||
|
|
||||||
-- Constructors
|
export const cons := \hd. \tl. wrap (option::some tuple::t[hd, unwrap tl])
|
||||||
|
export const end := wrap option::none
|
||||||
|
export const pop := \list. \default. \f. (
|
||||||
|
option::handle (unwrap list)
|
||||||
|
default
|
||||||
|
\pair. tuple::apply pair
|
||||||
|
\len. if len == 2
|
||||||
|
then ( \hd. \tl. f hd (wrap tl) )
|
||||||
|
else panic "list element must be 2-ple"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
export const cons := \hd. \tl. option::some t[hd, tl]
|
export const cons := ty::cons
|
||||||
export const end := option::none
|
export const end := ty::end
|
||||||
|
export const pop := ty::pop
|
||||||
export const pop := \list. \default. \f. do{
|
|
||||||
cps tuple = list default;
|
|
||||||
cps head, tail = tuple;
|
|
||||||
f head tail
|
|
||||||
}
|
|
||||||
|
|
||||||
-- Operators
|
-- Operators
|
||||||
|
|
||||||
@@ -124,8 +132,34 @@ export const chain := \list. \cont. loop_over (list) {
|
|||||||
cps head;
|
cps head;
|
||||||
}
|
}
|
||||||
|
|
||||||
macro new[...$item, ...$rest:1] =0x2p84=> (cons (...$item) new[...$rest])
|
macro new[..$items] =0x2p84=> mk_list macro::comma_list (..$items)
|
||||||
macro new[...$end] =0x1p84=> (cons (...$end) end)
|
|
||||||
macro new[] =0x1p84=> end
|
macro mk_list ( macro::list_item $item $tail ) =0x1p254=> (cons $item mk_list $tail)
|
||||||
|
macro mk_list macro::list_end =0x1p254=> end
|
||||||
|
|
||||||
export ::(new)
|
export ::(new)
|
||||||
|
|
||||||
|
( macro match::request (cons $head $tail)
|
||||||
|
=0x1p230=> await_subpatterns
|
||||||
|
(match::request ($head))
|
||||||
|
(match::request ($tail))
|
||||||
|
)
|
||||||
|
( macro await_subpatterns
|
||||||
|
(match::response $h_expr ( $h_binds ))
|
||||||
|
(match::response $t_expr ( $t_binds ))
|
||||||
|
=0x1p230=> match::response (
|
||||||
|
pop
|
||||||
|
match::value
|
||||||
|
match::fail
|
||||||
|
\head. \tail. (
|
||||||
|
(\match::pass. (\match::value. $h_expr) head)
|
||||||
|
(match::take_binds $h_binds (
|
||||||
|
(\match::pass. (\match::value. $t_expr) tail)
|
||||||
|
(match::take_binds $t_binds (
|
||||||
|
match::give_binds match::chain_binds $h_binds $t_binds match::pass
|
||||||
|
))
|
||||||
|
))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(match::chain_binds $h_binds $t_binds)
|
||||||
|
)
|
||||||
|
|||||||
@@ -37,6 +37,7 @@ macro parse_binds (...$item) =0x1p250=> (
|
|||||||
()
|
()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
-- while loop
|
-- while loop
|
||||||
export macro statement (
|
export macro statement (
|
||||||
while ..$condition (..$binds) {
|
while ..$condition (..$binds) {
|
||||||
@@ -64,7 +65,7 @@ macro init_binds ( ($name $value) $tail ) =0x1p250=> $value init_binds $tail
|
|||||||
macro $fn init_binds () =0x1p250=> $fn
|
macro $fn init_binds () =0x1p250=> $fn
|
||||||
|
|
||||||
-- apply_binds passes the name for initializers
|
-- apply_binds passes the name for initializers
|
||||||
macro apply_binds ( ($name $_value) $tail ) =0x1p250=> $name apply_binds $tail
|
macro apply_binds ( ($name $value) $tail ) =0x1p250=> $name apply_binds $tail
|
||||||
macro $fn apply_binds () =0x1p250=> $fn
|
macro $fn apply_binds () =0x1p250=> $fn
|
||||||
|
|
||||||
--[
|
--[
|
||||||
|
|||||||
68
src/systems/stl/macro.orc
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
import std::number::add
|
||||||
|
import std::known::*
|
||||||
|
|
||||||
|
-- convert a comma-separated list into a linked list, with support for trailing commas
|
||||||
|
export ::comma_list
|
||||||
|
( macro comma_list ( ...$head, ...$tail:1 )
|
||||||
|
=0x2p254=> ( await_comma_list ( ...$head ) comma_list ( ...$tail ) )
|
||||||
|
)
|
||||||
|
( macro comma_list (...$only)
|
||||||
|
=0x1p254=> ( list_item (...$only) list_end )
|
||||||
|
)
|
||||||
|
( macro ( await_comma_list $head $tail )
|
||||||
|
=0x2p254=> ( list_item $head $tail )
|
||||||
|
)
|
||||||
|
( macro comma_list ()
|
||||||
|
=0x1p254=> list_end
|
||||||
|
)
|
||||||
|
( macro comma_list (...$data,)
|
||||||
|
=0x3p254=> comma_list (...$data)
|
||||||
|
)
|
||||||
|
|
||||||
|
-- convert a semicolon-separated list into a linked list, with support for trailing semicolons
|
||||||
|
export ::semi_list
|
||||||
|
( macro semi_list ( ...$head; ...$tail:1 )
|
||||||
|
=0x2p254=> ( await_semi_list ( ...$head ) semi_list ( ...$tail ) )
|
||||||
|
)
|
||||||
|
( macro semi_list (...$only)
|
||||||
|
=0x1p254=> ( list_item (...$only) list_end )
|
||||||
|
)
|
||||||
|
( macro ( await_semi_list $head $tail )
|
||||||
|
=0x2p254=> ( list_item $head $tail )
|
||||||
|
)
|
||||||
|
( macro semi_list ()
|
||||||
|
=0x1p254=> list_end
|
||||||
|
)
|
||||||
|
( macro semi_list (...$data;)
|
||||||
|
=0x3p254=> semi_list (...$data)
|
||||||
|
)
|
||||||
|
|
||||||
|
-- calculate the length of a linked list
|
||||||
|
export ::length
|
||||||
|
( macro length ( list_item $discard $tail )
|
||||||
|
=0x1p254=> await_length ( length $tail )
|
||||||
|
)
|
||||||
|
( macro await_length ( $len )
|
||||||
|
=0x1p254=> (add 1 $len)
|
||||||
|
)
|
||||||
|
macro length list_end =0x1p254=> (0)
|
||||||
|
|
||||||
|
|
||||||
|
export ::error
|
||||||
|
( macro ( ..$prefix error $details ..$suffix )
|
||||||
|
=0x2p255=> error $details
|
||||||
|
)
|
||||||
|
( macro [ ..$prefix error $details ..$suffix ]
|
||||||
|
=0x2p255=> error $details
|
||||||
|
)
|
||||||
|
( macro { ..$prefix error $details ..$suffix }
|
||||||
|
=0x2p255=> error $details
|
||||||
|
)
|
||||||
|
( macro error $details
|
||||||
|
=0x1p255=>
|
||||||
|
)
|
||||||
|
|
||||||
|
export ::leftover_error
|
||||||
|
( macro leftover_error $details
|
||||||
|
=0x1p255=> error ( "Token fails to parse" $details )
|
||||||
|
)
|
||||||
@@ -1,73 +1,96 @@
|
|||||||
-import super::(bool::*, functional::*, known::*, list, option, loop::*, procedural::*)
-import std::panic
+import super::(bool::*, functional::*, known::*, loop::*, procedural::*)
+import super::(panic, match, macro, option, list)
 
--- utilities for using lists as pairs
-const fst := \l. (
-  list::get l 0
-  (panic "nonempty expected")
-  \x.x
-)
-const snd := \l. (
-  list::get l 1
-  (panic "2 elements expected")
-  \x.x
-)
+export type ty (
+  import super::super::(panic, macro, list, tuple, option)
+  import super::super::(bool::*, functional::*, known::*, loop::*, procedural::*)
+
+  --[ Constructors ]--
 
--- constructors
-export const empty := list::end
-export const add := \m. \k. \v. (
+  const empty := wrap list::end
+  const add := \m. \k. \v. wrap (
     list::cons
-      list::new[k, v]
-      m
+      tuple::t[k, v]
+      (unwrap m)
   )
 
--- queries
+  --[ List constructor ]--
 
--- return the last occurrence of a key if exists
-export const get := \m. \key. (
-  loop_over (m) {
+  export ::new
+  macro new[..$items] =0x2p84=> mk_map macro::comma_list (..$items)
+  macro mk_map macro::list_end =0x1p254=> empty
+  ( macro mk_map ( macro::list_item ( ...$key = ...$value:1 ) $tail )
+    =0x1p254=> ( set mk_map $tail (...$key) (...$value) )
+  )
+
+  --[ Queries ]--
+
+  -- return the last occurrence of a key if exists
+  export const get := \m. \key. (
+    loop_over (m=unwrap m) {
       cps record, m = list::pop m option::none;
-      cps if fst record == key
-      then return $ option::some $ snd record
+      cps if tuple::pick record 0 == key
+      then return $ option::some $ tuple::pick record 1
       else identity;
     }
   )
 
--- commands
+  --[ Commands ]--
 
   -- remove one occurrence of a key
-export const del := \m. \k. (
-  recursive r (m)
+  export const del := \m. \k. wrap (
+    recursive r (m=unwrap m)
       list::pop m list::end \head. \tail.
-      if fst head == k then tail
+      if tuple::pick head 0 == k then tail
       else list::cons head $ r tail
+  )
+
+  -- replace at most one occurrence of a key
+  export const set := \m. \k. \v. m |> del k |> add k v
 )
 
--- remove all occurrences of a key
-export const delall := \m. \k. (
-  list::filter m \record. fst record != k
-)
+macro new =0x1p200=> ty::new
 
--- replace at most one occurrence of a key
-export const set := \m. \k. \v. (
-  m
-  |> del k
-  |> add k v
-)
+export const empty := ty::empty
+export const add := ty::add
+export const get := ty::get
+export const set := ty::set
+export const del := ty::del
 
--- ensure that there's only one instance of each key in the map
-export const normalize := \m. (
-  recursive r (m, normal=empty)
-    list::pop m normal \head. \tail.
-    r tail $ set normal (fst head) (snd head)
+export ::having
+( macro match::request (having [..$items])
+  =0x1p230=> having_pattern (
+    pattern_walker
+      macro::comma_list ( ..$items )
+  )
 )
 
-macro new[...$tail:2, ...$key = ...$value:1] =0x2p84=> (
-  set new[...$tail] (...$key) (...$value)
+( macro having_pattern ( tail_result $expr ( $binds ) )
+  =0x1p254=> match::response $expr ( $binds )
+)
+( macro pattern_walker macro::list_end
+  =0x1p254=> tail_result match::pass ( match::no_binds )
+)
+( macro pattern_walker ( macro::list_item ( ...$key = ...$value:1 ) $tail )
+  =0x1p254=> await_pattern ( ...$key )
+    ( match::request (...$value) )
+    ( pattern_walker $tail )
+)
+( macro await_pattern $key
+  ( match::response $expr ( $binds ) )
+  ( tail_result $t_expr ( $t_binds ) )
+  =0x1p254=> tail_result (
+    option::handle (get match::value $key)
+      match::fail
+      \value. (\match::pass. (\match::value. $expr) value) (
+        match::take_binds $binds (
+          (\match::pass. $t_expr) (
+            match::take_binds $t_binds (
+              match::give_binds match::chain_binds $binds $t_binds match::pass
+            )
+          )
+        )
+      )
+  )
+  ( match::chain_binds $binds $t_binds )
 )
-macro new[...$key = ...$value:1] =0x1p84=> (add empty (...$key) (...$value))
-macro new[] =0x1p84=> empty
 
-export ::(new)
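For illustration only (not part of the commit): a minimal, untested sketch of how the reworked map module above might be used, assuming it is mounted as std::map as in the rest of the stl.

  const user := map::new["name" = "alice", "admin" = true]
  const name := map::get user "name"        -- expected: option::some "alice"
  const demoted := map::set user "admin" false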
104 src/systems/stl/match.orc Normal file
@@ -0,0 +1,104 @@
import std::known::(_, ;)
import std::procedural
import std::bool
import std::macro
import std::panic

--[
  The protocol:

  Request contains the pattern
  Response contains an expression and the list of names
]--

(
  macro ..$prefix:1 match ...$argument:0 { ..$body } ..$suffix:1
  =0x1p130=> ..$prefix (
    (\value. match_walker macro::semi_list ( ..$body ) )
    ( ...$argument )
  ) ..$suffix
)

macro match_walker macro::list_end =0x1p254=> panic "no arms match"
( macro match_walker ( macro::list_item (...$pattern => ...$handler:1) $tail )
  =0x1p254=> match_await ( request (...$pattern) ) (...$handler) ( match_walker $tail )
)
( macro match_await ( response $expr ( $binds ) ) $handler $tail
  =0x1p254=> (\fail. (\pass. $expr) (take_binds $binds $handler)) $tail
)

macro request (( ..$pattern )) =0x1p254=> request ( ..$pattern )

-- bindings list

export ::(no_binds, add_bind, chain_binds, give_binds, take_binds)

macro add_bind $_new no_binds =0x1p254=> ( binds_list $_new no_binds )
( macro add_bind $_new ( binds_list ...$tail )
  =0x1p254=> ( binds_list $_new ( binds_list ...$tail ) )
)
macro give_binds no_binds $cont =0x1p254=> $cont
( macro give_binds ( binds_list $_name $tail ) $cont
  =0x1p254=> (give_binds $tail $cont $_name)
)
macro take_binds no_binds $cont =0x1p254=> $cont
( macro take_binds ( binds_list $_name $tail ) $cont
  =0x1p254=> \$_name. take_binds $tail $cont
)
macro chain_binds no_binds $second =0x1p254=> $second
( macro chain_binds ( binds_list $_head $tail ) $second
  =0x1p254=> add_bind $_head chain_binds $tail $second
)

--[ primitive pattern ( _ ) ]--

(
  macro request ( _ )
  =0x1p230=> response pass ( no_binds )
)

--[ primitive name pattern ]--

(
  macro request ( $_name )
  =0x1p226=> response ( pass value ) ( add_bind $_name no_binds )
)

--[ primitive pattern ( and ) ]--

( macro request ( ...$lhs bool::and ...$rhs )
  =0x3p230=> await_and_subpatterns ( request (...$lhs ) ) ( request ( ...$rhs ) )
)

( macro await_and_subpatterns ( response $lh_expr ( $lh_binds ) ) ( response $rh_expr ( $rh_binds ) )
  =0x1p254=> response (
    (\pass. $lh_expr) (take_binds $lh_binds (
      (\pass. $rh_expr) (take_binds $rh_binds (
        give_binds chain_binds $lh_binds $rh_binds pass
      ))
    ))
  )
  ( chain_binds $lh_binds $rh_binds )
)

--[ primitive pattern ( or ) ]--

(
  macro request ( ...$lhs bool::or ...$rhs )
  =0x3p230=> await_or_subpatterns
    ( request ( ...$lhs ) )
    ( request ( ...$rhs ) )
)

( -- for this to work, lh and rh must produce the same bindings
  macro await_or_subpatterns ( response $lh_expr ( $lh_binds) ) ( response $rh_expr ( $rh_binds ) )
  =0x1p254=> response (
    (\cancel. $lh_expr) -- lh works with pass directly because its bindings are reported up
    ($rh_expr (take_binds $rh_binds -- rh runs if lh cancels
      (give_binds $lh_binds pass) -- translate rh binds to lh binds
    ))
  )
  ( $lh_binds ) -- report lh bindings
)

export ::(match, cancel, argument, request, response, =>)
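For illustration only (not part of the commit): a minimal, untested sketch of a match expression as the macros above would expand it, assuming the option patterns defined later in this commit.

  const describe := \opt. match opt {
    option::some x => x;
    option::none => "nothing";
  }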
@@ -4,10 +4,13 @@ mod arithmetic_error;
 mod binary;
 mod bool;
 mod conv;
+mod cross_pipeline;
 mod exit_status;
 mod inspect;
 mod number;
 mod panic;
+mod protocol;
+mod reflect;
 mod state;
 mod stl_system;
 mod string;
@@ -1,5 +1,3 @@
-use std::rc::Rc;
-
 use ordered_float::NotNan;
 
 use super::ArithmeticError;
@@ -39,7 +37,7 @@ impl Numeric {
   }
 
   /// Wrap a f64 in a Numeric
-  pub fn new(value: f64) -> Result<Self, Rc<dyn ExternError>> {
+  pub fn new(value: f64) -> XfnResult<Self> {
     if value.is_finite() {
       NotNan::new(value)
         .map(Self::Float)
@@ -50,7 +48,7 @@ impl Numeric {
   }
 }
 impl TryFromExprInst for Numeric {
-  fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
+  fn from_exi(exi: ExprInst) -> XfnResult<Self> {
     (exi.request())
       .ok_or_else(|| AssertionError::ext(Location::Unknown, "a numeric value"))
   }
@@ -1,9 +1,40 @@
-import std::panic
+import std::(panic, match)
 
-export const some := \v. \d. \f. f v
-export const none := \d. \f. d
-
-export const map := \option. \f. option none f
-export const flatten := \option. option none \opt. opt
-export const flatmap := \option. \f. option none \opt. map opt f
-export const unwrap := \option. option (panic "value expected") \x.x
+export type ty (
+  export const some := \v. wrap \d. \f. f v
+  export const none := wrap \d. \f. d
+
+  export const handle := \t. \d. \f. (unwrap t) d f
+)
+
+export const some := ty::some
+export const none := ty::none
+export const handle := ty::handle
+
+export const map := \option. \f. handle option none f
+export const flatten := \option. handle option none \opt. opt
+export const flatmap := \option. \f. handle option none \opt. map opt f
+export const unwrap := \option. handle option (panic "value expected") \x.x
+
+(
+  macro match::request ( none )
+  =0x1p230=> match::response (
+    handle match::value
+      match::pass
+      \_. match::fail
+  ) ( match::no_binds )
+)
+
+(
+  macro match::request ( some ...$value )
+  =0x1p230=> await_some_subpattern ( match::request (...$value) )
+)
+
+(
+  macro await_some_subpattern ( match::response $expr ( $binds ) )
+  =0x1p254=> match::response (
+    handle match::value
+      match::fail
+      \match::value. $expr
+  ) ( $binds )
+)
@@ -1,5 +1,5 @@
 use std::fmt::Display;
-use std::rc::Rc;
+use std::sync::Arc;
 
 use crate::foreign::{xfn_1ary, ExternError, XfnResult};
 use crate::interpreted::Clause;
@@ -7,7 +7,8 @@ use crate::{ConstTree, Interner, OrcString};
 
 /// An unrecoverable error in Orchid land. Because Orchid is lazy, this only
 /// invalidates expressions that reference the one that generated it.
-pub struct OrchidPanic(Rc<String>);
+#[derive(Clone)]
+pub struct OrchidPanic(Arc<String>);
 
 impl Display for OrchidPanic {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -20,7 +21,7 @@ impl ExternError for OrchidPanic {}
 /// Takes a message, returns an [ExternError] unconditionally.
 pub fn orc_panic(msg: OrcString) -> XfnResult<Clause> {
   // any return value would work, but Clause is the simplest
-  Err(OrchidPanic(Rc::new(msg.get_string())).into_extern())
+  Err(OrchidPanic(Arc::new(msg.get_string())).into_extern())
 }
 
 pub fn panic(i: &Interner) -> ConstTree {
@@ -5,11 +5,13 @@ export ::[++]
 import std::bool::*
 export ::([== !=], if, then, else, true, false, and, or, not)
 import std::functional::*
-export ::([$ |> =>], identity, pass, pass2, return)
+export ::([$ |>], identity, pass, pass2, return)
 import std::procedural::*
-export ::(do, let, cps, [; =])
-import std::tuple::*
+export ::(do, let, cps, [;])
+import std::tuple::t
 export ::(t)
+import std::match::(match, [=>])
+export ::(match, [=>])
 import std::tuple
 import std::list
 import std::map
@@ -19,4 +21,4 @@ import std::loop::*
 export ::(loop_over, recursive, while)
 
 import std::known::*
-export ::[,]
+export ::[, _ ; . =]
@@ -1,4 +1,5 @@
-import super::functional::=>
+import super::match::=>
+import super::known::*
 
 -- remove duplicate ;-s
 export macro do {
@@ -14,8 +15,11 @@ export macro do { ...$return } =0x1p130=> (...$return)
 -- modular operation block that returns a CPS function
 export macro do cps { ...$body } =0x1p130=> \cont. do { ...$body ; cont }
 
-export macro statement (let $name = ...$value) (...$next) =0x1p230=> (
-  ( \$name. ...$next) (...$value)
+export macro statement (let $_name = ...$value) (...$next) =0x2p230=> (
+  ( \$_name. ...$next) (...$value)
+)
+export macro statement (let ...$pattern = ...$value:1) (...$next) =0x1p230=> (
+  ( (...$pattern) => (...$next) ) (...$value)
 )
 export macro statement (cps ...$names = ...$operation:1) (...$next) =0x2p230=> (
   (...$operation) ( (...$names) => ...$next )
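For illustration only (not part of the commit): an untested sketch of the new pattern form of let inside a do block, which delegates to match via (...$pattern) => (...$next); t[..] is the tuple constructor from this commit.

  const swap := \pair. do {
    let t[a, b] = pair;
    t[b, a]
  }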
283 src/systems/stl/protocol.rs Normal file
@@ -0,0 +1,283 @@
use std::fmt::Debug;
use std::sync::Arc;

use hashbrown::HashMap;
use itertools::Itertools;

use super::cross_pipeline::defer_to_runtime;
use super::reflect::RefEqual;
use crate::ast::{self, Constant, Expr, PType};
use crate::error::{ProjectResult, RuntimeError};
use crate::foreign::{xfn_2ary, Atomic, InertAtomic, XfnResult};
use crate::interpreted::ExprInst;
use crate::parse::errors::{Expected, ExpectedBlock, ExpectedName};
use crate::parse::{
  parse_entries, parse_exprv, parse_line, parse_nsname, split_lines,
  vec_to_single, Context, Lexeme, LineParser, LineParserOut, Stream,
};
use crate::sourcefile::{
  FileEntry, FileEntryKind, Member, MemberKind, ModuleBlock,
};
use crate::systems::parse_custom_line::custom_line;
use crate::utils::pure_seq::pushed;
use crate::{ConstTree, Interner, Location, Tok, VName};

pub struct TypeData {
  pub id: RefEqual,
  pub display_name: Tok<String>,
  pub impls: HashMap<RefEqual, ExprInst>,
}

#[derive(Clone)]
pub struct Protocol(pub Arc<TypeData>);
impl Debug for Protocol {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_tuple(&self.0.display_name).field(&self.0.id.id()).finish()
  }
}
impl InertAtomic for Protocol {
  fn type_str() -> &'static str { "Protocol" }
}

#[derive(Clone)]
pub struct Tag(pub Arc<TypeData>);
impl Debug for Tag {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_tuple(&self.0.display_name).field(&self.0.id.id()).finish()
  }
}
impl InertAtomic for Tag {
  fn type_str() -> &'static str { "Tag" }
  fn strict_eq(&self, other: &Self) -> bool { self.0.id == other.0.id }
}

#[derive(Clone)]
pub struct Tagged {
  pub tag: Tag,
  pub value: ExprInst,
}
impl Debug for Tagged {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_tuple("Tagged").field(&self.tag).field(&self.value).finish()
  }
}
impl InertAtomic for Tagged {
  fn type_str() -> &'static str { "Tagged" }
}

fn parse_impl(
  tail: Stream,
  ctx: &(impl Context + ?Sized),
) -> Option<ProjectResult<(VName, Expr<VName>)>> {
  custom_line(tail, ctx.interner().i("impl"), false).map(|res| {
    let (_, tail, _) = res?;
    let (name, tail) = parse_nsname(tail, ctx)?;
    let (walrus, tail) = tail.trim().pop()?;
    Expected::expect(Lexeme::Walrus, walrus)?;
    let (body, empty) = parse_exprv(tail, None, ctx)?;
    empty.expect_empty()?;
    let value = vec_to_single(tail.fallback, body)?;
    Ok((name, value))
  })
}

struct Impl {
  target: VName,
  value: Expr<VName>,
}

fn extract_impls(
  tail: Stream,
  ctx: &(impl Context + ?Sized),
  location: Location,
  typeid_name: Tok<String>,
) -> ProjectResult<(Vec<FileEntry>, Vec<Impl>)> {
  let mut lines = Vec::new();
  let mut impls = Vec::new(); // name1, value1, name2, value2, etc...
  for line in split_lines(tail) {
    match parse_impl(line, ctx) {
      Some(result) => {
        let (name, value) = result?;
        impls.push(Impl { target: pushed(name, typeid_name.clone()), value });
      },
      None => lines.extend(
        parse_line(line, ctx)?.into_iter().map(|k| k.wrap(location.clone())),
      ),
    }
  }
  Ok((lines, impls))
}

pub fn protocol_parser<'a>(
  tail: Stream<'_>,
  ctx: &'a (impl Context + ?Sized + 'a),
) -> LineParserOut {
  let i = ctx.interner();
  custom_line(tail, i.i("protocol"), true).map(|res| {
    let (exported, tail, line_loc) = res?;
    let (name, tail) = tail.pop()?;
    let name = ExpectedName::expect(name)?;
    let tail = ExpectedBlock::expect(tail, PType::Par)?;
    let protoid = RefEqual::new();
    let (lines, impls) =
      extract_impls(tail, ctx, line_loc.clone(), i.i("__type_id__"))?;
    let prelude = "
      import std::protocol
      const resolve := protocol::resolve __protocol__
      const get_impl := protocol::get_impl __protocol__
    ";
    let body = parse_entries(ctx, prelude, line_loc.clone())?
      .into_iter()
      .chain(
        [
          ("__protocol_id__", protoid.clone().ast_cls()),
          (
            "__protocol__",
            defer_to_runtime(
              impls.into_iter().flat_map(|Impl { target, value }| {
                [ast::Clause::Name(target).into_expr(), value]
                  .map(|e| ((), vec![e]))
              }),
              {
                let name = name.clone();
                move |pairs: Vec<((), ExprInst)>| {
                  let mut impls = HashMap::new();
                  debug_assert!(
                    pairs.len() % 2 == 0,
                    "names and values pair up"
                  );
                  let mut nvnvnv = pairs.into_iter().map(|t| t.1);
                  while let Some((name, value)) = nvnvnv.next_tuple() {
                    let key = name.downcast::<RefEqual>()?;
                    impls.insert(key, value);
                  }
                  let id = protoid.clone();
                  let display_name = name.clone();
                  Ok(Protocol(Arc::new(TypeData { id, display_name, impls })))
                }
              },
            ),
          ),
        ]
        .map(|(n, value)| {
          let value = Expr { value, location: line_loc.clone() };
          MemberKind::Constant(Constant { name: i.i(n), value })
            .to_entry(true, line_loc.clone())
        }),
      )
      .chain(lines)
      .collect();
    let kind = MemberKind::Module(ModuleBlock { name, body });
    Ok(vec![FileEntryKind::Member(Member { exported, kind })])
  })
}

pub fn type_parser(
  tail: Stream,
  ctx: &(impl Context + ?Sized),
) -> LineParserOut {
  let i = ctx.interner();
  custom_line(tail, ctx.interner().i("type"), true).map(|res| {
    let (exported, tail, line_loc) = res?;
    let (name, tail) = tail.pop()?;
    let name = ExpectedName::expect(name)?;
    let tail = ExpectedBlock::expect(tail, PType::Par)?;
    let typeid = RefEqual::new();
    let (lines, impls) =
      extract_impls(tail, ctx, line_loc.clone(), i.i("__protocol_id__"))?;
    let prelude = "
      import std::protocol
      const unwrap := protocol::unwrap __type_tag__
      const wrap := protocol::wrap __type_tag__
    ";
    let body = parse_entries(ctx, prelude, line_loc.clone())?
      .into_iter()
      .chain(
        [
          ("__type_id__", typeid.clone().ast_cls()),
          (
            "__type_tag__",
            defer_to_runtime(
              impls.into_iter().flat_map(|Impl { target, value }| {
                [ast::Clause::Name(target).into_expr(), value]
                  .map(|e| ((), vec![e]))
              }),
              {
                let name = name.clone();
                move |pairs: Vec<((), ExprInst)>| {
                  let mut impls = HashMap::new();
                  debug_assert!(
                    pairs.len() % 2 == 0,
                    "names and values pair up"
                  );
                  let mut nvnvnv = pairs.into_iter().map(|t| t.1);
                  while let Some((name, value)) = nvnvnv.next_tuple() {
                    let key = name.downcast::<RefEqual>()?;
                    impls.insert(key, value);
                  }
                  let id = typeid.clone();
                  let display_name = name.clone();
                  Ok(Tag(Arc::new(TypeData { id, display_name, impls })))
                }
              },
            ),
          ),
        ]
        .map(|(n, value)| {
          let value = Expr { value, location: line_loc.clone() };
          MemberKind::Constant(Constant { name: i.i(n), value })
            .to_entry(true, line_loc.clone())
        }),
      )
      .chain(lines)
      .collect();
    let kind = MemberKind::Module(ModuleBlock { name, body });
    Ok(vec![FileEntryKind::Member(Member { exported, kind })])
  })
}

pub fn parsers() -> Vec<Box<dyn LineParser>> {
  vec![
    Box::new(|tail, ctx| protocol_parser(tail, ctx)),
    Box::new(|tail, ctx| type_parser(tail, ctx)),
  ]
}

pub fn unwrap(tag: Tag, tagged: Tagged) -> XfnResult<ExprInst> {
  if tagged.tag.strict_eq(&tag) {
    return Ok(tagged.value);
  }
  let msg = format!("{:?} is not {:?}", tagged.tag, tag);
  RuntimeError::fail(msg, "unwrapping type-tagged value")
}

pub fn wrap(tag: Tag, value: ExprInst) -> XfnResult<Tagged> {
  Ok(Tagged { tag, value })
}

pub fn resolve(protocol: Protocol, tagged: Tagged) -> XfnResult<ExprInst> {
  get_impl(protocol, tagged.tag)
}

pub fn get_impl(proto: Protocol, tag: Tag) -> XfnResult<ExprInst> {
  if let Some(implem) = proto.0.impls.get(&tag.0.id) {
    return Ok(implem.clone());
  }
  if let Some(implem) = tag.0.impls.get(&proto.0.id) {
    return Ok(implem.clone());
  }
  let message = format!("{:?} doesn't implement {:?}", tag, proto);
  RuntimeError::fail(message, "dispatching protocol")
}

pub fn protocol_lib(i: &Interner) -> ConstTree {
  ConstTree::namespace(
    [i.i("protocol")],
    ConstTree::tree([
      (i.i("unwrap"), ConstTree::xfn(xfn_2ary(unwrap))),
      (i.i("wrap"), ConstTree::xfn(xfn_2ary(wrap))),
      (i.i("get_impl"), ConstTree::xfn(xfn_2ary(get_impl))),
      (i.i("resolve"), ConstTree::xfn(xfn_2ary(resolve))),
    ]),
  )
}
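For illustration only (not part of the commit): a rough, untested guess at the Orchid surface syntax these line parsers accept; every name below is invented. Both parsers read impl lines introduced with :=, and the generated modules expose resolve/get_impl (protocols) or wrap/unwrap (types).

  export protocol to_text (
    -- impl lines here would attach defaults keyed by a type's __type_id__
  )
  export type celsius (
    impl to_text := \c. "some temperature"  -- invented body
    const new := \degrees. wrap degrees
  )
  -- dispatch would then go through to_text::resolve on a tagged value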
69 src/systems/stl/reflect.rs Normal file
@@ -0,0 +1,69 @@
use std::cmp::Ordering;
use std::fmt::Debug;
use std::hash::Hash;
use std::sync::Arc;

use crate::foreign::{xfn_2ary, InertAtomic};
use crate::{ConstTree, Interner, Sym};

#[derive(Debug, Clone)]
pub struct SymbolName(pub Sym);
impl InertAtomic for SymbolName {
  fn type_str() -> &'static str { "SymbolName" }
}

// #[derive(Debug, Clone)]
// pub struct GetSymName;
// impl ExternFn for GetSymName {
//   fn name(&self) -> &str { "GetSymName" }
//   fn apply(
//     self: Box<Self>,
//     arg: ExprInst,
//     _: Context,
//   ) -> XfnResult<Clause> { arg.inspect(|c| match c { Clause::Constant(name)
//     => Ok(SymbolName(name.clone()).atom_cls()), _ =>
//     AssertionError::fail(arg.location(), "is not a constant name"), })
//   }
// }

#[derive(Clone)]
pub struct RefEqual(Arc<u8>);
impl RefEqual {
  pub fn new() -> Self { Self(Arc::new(0u8)) }
  pub fn id(&self) -> usize { &*self.0 as *const u8 as usize }
}
impl Debug for RefEqual {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_tuple("RefEqual").field(&self.id()).finish()
  }
}
impl InertAtomic for RefEqual {
  fn type_str() -> &'static str { "RefEqual" }
  fn strict_eq(&self, other: &Self) -> bool { self == other }
}
impl Eq for RefEqual {}
impl PartialEq for RefEqual {
  fn eq(&self, other: &Self) -> bool { self.id() == other.id() }
}
impl Ord for RefEqual {
  fn cmp(&self, other: &Self) -> Ordering { self.id().cmp(&other.id()) }
}
impl PartialOrd for RefEqual {
  fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
    Some(self.cmp(other))
  }
}
impl Hash for RefEqual {
  fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.id().hash(state) }
}

pub fn reflect(i: &Interner) -> ConstTree {
  // ConstTree::tree([])
  ConstTree::namespace(
    [i.i("reflect")],
    ConstTree::tree([(
      i.i("ref_equal"),
      ConstTree::xfn(xfn_2ary(|l: RefEqual, r: RefEqual| Ok(l.id() == r.id()))),
    )]),
  )
}
@@ -10,6 +10,8 @@ use super::exit_status::exit_status;
 use super::inspect::inspect;
 use super::number::num;
 use super::panic::panic;
+use super::protocol::{parsers, protocol_lib};
+use super::reflect::reflect;
 use super::state::{state_handlers, state_lib};
 use super::string::str;
 use crate::facade::{IntoSystem, System};
@@ -40,8 +42,10 @@ impl IntoSystem<'static> for StlConfig {
       + exit_status(i)
       + num(i)
       + panic(i)
+      + reflect(i)
       + state_lib(i)
-      + str(i);
+      + str(i)
+      + protocol_lib(i);
     let mk_impure_fns = || inspect(i);
     let fns = if self.impure { pure_tree + mk_impure_fns() } else { pure_tree };
     System {
@@ -57,8 +61,8 @@ impl IntoSystem<'static> for StlConfig {
         }]),
       }],
       handlers: state_handlers(),
-      lexer_plugin: None,
-      line_parser: None,
+      lexer_plugins: vec![],
+      line_parsers: parsers(),
     }
   }
 }
@@ -1,16 +1,84 @@
-import super::(known::*, bool::*, number::*)
+import super::(known::*, bool::*, number::*, match, macro)
 
-const discard_args := \n. \value. (
+export type ty (
+  import super::super::(number::*, bool::*, macro, panic)
+
+  const discard_args := \n. \value. (
     if n == 0 then value
     else \_. discard_args (n - 1) value
+  )
+
+  macro gen_call macro::list_end =0x1p254=> \f.f
+  macro gen_call ( macro::list_item $item $tail ) =0x1p254=> \f. (gen_call $tail) (f $item)
+  export macro new ( $list ) =0x1p84=> wrap \f. (gen_call $list) (f (macro::length $list))
+
+  export const pick := \tuple. \i. (unwrap tuple) ( \size.
+    if size <= i then panic "Tuple index out of bounds"
+    else discard_args i \val. discard_args (size - 1 - i) val
+  )
+
+  export const length := \tuple. (unwrap tuple) \size. discard_args size size
+
+  export const apply := \tuple. \f. (unwrap tuple) f
 )
 
-export const pick := \tuple. \i. \n. tuple (
-  discard_args i \val. discard_args (n - 1 - i) val
+const pick := ty::pick
+const length := ty::length
+const apply := ty::apply
+
+macro t[..$items] =0x2p84=> ( ty::new ( macro::comma_list (..$items) ) )
+
+export ::(t, size)
+
+macro size ( t[..$items] ) =0x1p230=> macro::length macro::comma_list (..$items)
+
+--[
+  request l -> pattern_walker l
+  pattern_walker end -> pattern_result
+  pattern_walker h ++ t -> await_pattern
+  await_pattern -> pattern_result
+]--
+
+( macro match::request ( t[ ..$items ] )
+  =0x1p230=> tuple_pattern
+    ( macro::length macro::comma_list ( ..$items ) )
+    (
+      pattern_walker
+        (0) -- index of next item
+        macro::comma_list ( ..$items ) -- leftover items
+    )
+)
+( macro tuple_pattern $length ( pattern_result $expr ( $binds ) )
+  =0x1p254=> match::response (
+    if length match::value == $length
+    then $expr
+    else match::fail
+  ) ( $binds )
+)
+( macro pattern_walker $length macro::list_end
+  =0x1p254=> pattern_result match::pass ( match::no_binds )
+)
+( macro pattern_walker (...$length) ( macro::list_item $next $tail )
+  =0x1p254=> pattern_await
+    (...$length)
+    ( match::request $next )
+    ( pattern_walker (...$length + 1) $tail )
+)
+( macro pattern_await $length
+  ( match::response $expr ( $binds ) )
+  ( pattern_result $tail_expr ( $tail_binds ) )
+  =0x1p254=>
+  pattern_result
+  (
+    (\match::pass. (\match::value. $expr) (pick match::value $length)) (
+      match::take_binds $binds (
+        (\match::pass. $tail_expr) ( match::take_binds $tail_binds (
+          match::give_binds
+            match::chain_binds $binds $tail_binds
+            match::pass
+        ))
+      )
+    )
+  )
+  ( match::chain_binds $binds $tail_binds )
 )
 
-macro t[...$item, ...$rest:1] =0x2p84=> (\f. t[...$rest] (f (...$item)))
-macro t[...$end] =0x1p84=> (\f. f (...$end))
-macro t[] =0x1p84=> \f.f
 
-export ::(t)
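For illustration only (not part of the commit): a minimal, untested sketch combining the t[..] constructor, pick and the tuple match pattern defined above.

  const point := t[3, 4]
  const x := tuple::pick point 0   -- expected: 3
  const sum := match point {
    t[a, b] => a + b;
  }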