Added directfs

Added a very rudimentary file I/O system suitable for experimenting
with the language further. A better one will be designed when we have
sensible error management.
2023-09-17 16:37:39 +01:00
parent 1078835e8b
commit 7396078304
84 changed files with 563 additions and 721 deletions

View File

@@ -0,0 +1,5 @@
export const block_on := \action.\cont. (
action cont
(\e.panic "unwrapped asynch call")
\c.yield
)

View File

@@ -8,12 +8,14 @@ use std::time::Duration;
use hashbrown::HashMap;
use ordered_float::NotNan;
use rust_embed::RustEmbed;
use crate::facade::{IntoSystem, System};
use crate::foreign::cps_box::{init_cps, CPSBox};
use crate::foreign::{Atomic, ExternError, InertAtomic};
use crate::interpreted::ExprInst;
use crate::interpreter::HandlerTable;
use crate::pipeline::file_loader::embed_to_map;
use crate::systems::codegen::call;
use crate::systems::stl::Boolean;
use crate::utils::poller::{PollEvent, Poller};
@@ -68,6 +70,12 @@ impl MessagePort {
}
}
#[derive(RustEmbed)]
#[folder = "src/systems/asynch"]
#[prefix = "system/"]
#[include = "*.orc"]
struct AsynchEmbed;
type AnyHandler<'a> = Box<dyn FnMut(Box<dyn Any>) -> Vec<ExprInst> + 'a>;
/// Data structures the asynch system will eventually be constructed from.
@@ -80,6 +88,7 @@ pub struct AsynchSystem<'a> {
impl<'a> AsynchSystem<'a> {
/// Create a new async event loop that allows registering handlers and taking
/// references to the port before it's converted into a [System]
#[must_use]
pub fn new() -> Self {
let (sender, poller) = Poller::new();
Self { poller, sender, handlers: HashMap::new() }
@@ -108,6 +117,7 @@ impl<'a> AsynchSystem<'a> {
/// Obtain a message port for sending messages to the main thread. If an
/// object is passed to the MessagePort that does not have a handler, the
/// main thread panics.
#[must_use]
pub fn get_port(&self) -> MessagePort { MessagePort(self.sender.clone()) }
}
@@ -181,7 +191,7 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
]),
)
.unwrap_tree(),
code: HashMap::new(),
code: embed_to_map::<AsynchEmbed>(".orc", i),
prelude: Vec::new(),
handlers: handler_table,
}
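
For reference, a minimal sketch of the rust-embed access pattern that embed_to_map::<AsynchEmbed>(".orc", i) presumably builds on. It only compiles inside the crate (the folder attribute is resolved at build time), and dump_embedded_sources is an illustrative helper, not part of the codebase.

use rust_embed::RustEmbed;

// Mirror of the derive above: embed every *.orc file under src/systems/asynch
// and expose it under the virtual "system/" prefix.
#[derive(RustEmbed)]
#[folder = "src/systems/asynch"]
#[prefix = "system/"]
#[include = "*.orc"]
struct AsynchEmbed;

// List the embedded paths and print their contents; this is the raw material
// embed_to_map presumably parses into the system's `code` map.
fn dump_embedded_sources() {
    for path in AsynchEmbed::iter() {
        let file = AsynchEmbed::get(&path).expect("listed by iter()");
        println!("{path}:\n{}", String::from_utf8_lossy(file.data.as_ref()));
    }
}

fn main() { dump_embedded_sources(); }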

View File

@@ -38,14 +38,24 @@ fn none() -> Clause {
/// Define a clause that can be called with a callback and passes the provided
/// values to the callback in order.
pub fn tuple(data: Vec<ExprInst>) -> Clause {
Clause::Lambda {
args: Some(PathSet {
next: None,
steps: Rc::new(data.iter().map(|_| Side::Left).collect()),
}),
body: (data.into_iter())
.fold(Clause::LambdaArg.wrap(), |f, x| Clause::Apply { f, x }.wrap()),
pub fn tuple(data: impl IntoIterator<Item = ExprInst>) -> Clause {
let mut steps = Vec::new();
let mut body = Clause::LambdaArg.wrap();
for x in data.into_iter() {
steps.push(Side::Left);
body = Clause::Apply { f: body, x }.wrap()
}
let path_set = PathSet { next: None, steps: Rc::new(steps) };
Clause::Lambda { args: Some(path_set), body }
}
#[cfg(test)]
mod test {
use crate::systems::codegen::tuple;
#[test]
fn tuple_printer() {
println!("Binary tuple: {}", tuple([0.into(), 1.into()]))
}
}
@@ -55,3 +65,9 @@ pub fn call(f: ExprInst, args: impl IntoIterator<Item = ExprInst>) -> Clause {
let x = unwrap_or!(it.by_ref().next(); return f.inspect(Clause::clone));
it.fold(Clause::Apply { f, x }, |acc, x| Clause::Apply { f: acc.wrap(), x })
}
/// Build an Orchid list from a Rust iterator
pub fn list(items: impl IntoIterator<Item = ExprInst>) -> Clause {
let mut iter = items.into_iter();
orchid_opt(iter.next().map(|it| tuple([it, list(iter).wrap()]).wrap()))
}
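
As a plain-Rust illustration of the doc comment above (all names here are illustrative): the Clause built by tuple behaves like a function that hands its stored values, in order, to whatever callback it is applied to, and list nests such pairs inside options to form a linked list.

fn main() {
    // Rough analogue of tuple([2.into(), 3.into()]): "apply me to a callback
    // and I will pass it 2, then 3", the same shape as `\f. f 2 3`.
    let pair = |f: &dyn Fn(i64, i64) -> i64| f(2, 3);

    let sum: &dyn Fn(i64, i64) -> i64 = &|a, b| a + b;
    let first: &dyn Fn(i64, i64) -> i64 = &|a, _| a;
    assert_eq!(pair(sum), 5);   // the callback receives the elements in order
    assert_eq!(pair(first), 2); // projections are just selective callbacks
}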

View File

@@ -1,19 +1,203 @@
use crate::foreign::cps_box::init_cps;
use crate::foreign::InertAtomic;
use crate::systems::asynch::MessagePort;
use crate::systems::scheduler::SeqScheduler;
use crate::{define_fn, OrcString};
use std::ffi::OsString;
use std::fs::File;
use std::io::{BufReader, Read, Write};
use std::path::Path;
use hashbrown::HashMap;
use itertools::Itertools;
use crate::facade::{IntoSystem, System};
use crate::foreign::cps_box::{init_cps, CPSBox};
use crate::foreign::{Atomic, InertAtomic};
use crate::interpreted::{Clause, ExprInst};
use crate::interpreter::HandlerTable;
use crate::systems::codegen::{call, list, orchid_opt, tuple};
use crate::systems::io::wrap_io_error;
use crate::systems::scheduler::{SeqScheduler, SharedHandle};
use crate::systems::stl::Boolean;
use crate::systems::RuntimeError;
use crate::utils::unwrap_or;
use crate::{define_fn, ConstTree, OrcString};
#[derive(Debug, Clone)]
struct ReadFile(OrcString);
impl InertAtomic for ReadFile {
fn type_str() -> &'static str { "a readfile command" }
pub struct ReadFileCmd(OrcString);
impl InertAtomic for ReadFileCmd {
fn type_str() -> &'static str { "readfile command" }
}
pub fn read_file(port: MessagePort, cmd: ReadFile) -> Vec<ExprInst> {
let new_file =
#[derive(Debug, Clone)]
pub struct ReadDirCmd(OrcString);
impl InertAtomic for ReadDirCmd {
fn type_str() -> &'static str { "readdir command" }
}
#[derive(Debug, Clone)]
pub struct WriteFile {
name: OrcString,
append: bool,
}
impl InertAtomic for WriteFile {
fn type_str() -> &'static str { "writefile command" }
}
#[must_use]
fn read_file(sched: &SeqScheduler, cmd: CPSBox<ReadFileCmd>) -> ExprInst {
let (ReadFileCmd(name), succ, fail, cont) = cmd.unpack3();
let name = name.get_string();
let cancel = sched.run_orphan(
move |_| File::open(name),
|file, _| match file {
Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()],
Ok(f) => {
let source =
SharedHandle::wrap(BufReader::new(Box::new(f) as Box<dyn Read>));
vec![call(succ, [source.atom_exi()]).wrap()]
},
},
);
call(cont, [init_cps(1, cancel).wrap()]).wrap()
}
#[must_use]
fn read_dir(sched: &SeqScheduler, cmd: CPSBox<ReadDirCmd>) -> ExprInst {
let (ReadDirCmd(name), succ, fail, cont) = cmd.unpack3();
let name = name.get_string();
let cancel = sched.run_orphan(
move |_| {
Path::new(&name)
.read_dir()?
.map(|r| r.and_then(|e| Ok((e.file_name(), e.file_type()?.is_dir()))))
.collect()
},
|items: std::io::Result<Vec<(OsString, bool)>>, _| match items {
Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()],
Ok(os_namev) => {
let converted = (os_namev.into_iter())
.map(|(n, d)| {
Ok(tuple([os_str_cls(n)?.wrap(), Boolean(d).atom_exi()]).wrap())
})
.collect::<Result<Vec<_>, Clause>>();
match converted {
Err(e) => vec![call(fail, [e.wrap()]).wrap()],
Ok(names) => vec![call(succ, [list(names).wrap()]).wrap()],
}
},
},
);
call(cont, [init_cps(1, cancel).wrap()]).wrap()
}
#[must_use]
pub fn write_file(sched: &SeqScheduler, cmd: CPSBox<WriteFile>) -> ExprInst {
let (WriteFile { name, append }, succ, fail, cont) = cmd.unpack3();
let name = name.get_string();
let cancel = sched.run_orphan(
move |_| File::options().write(true).append(append).open(name),
|file, _| match file {
Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()],
Ok(f) => {
let handle = SharedHandle::wrap(Box::new(f) as Box<dyn Write>);
vec![call(succ, [handle.atom_exi()]).wrap()]
},
},
);
call(cont, [init_cps(1, cancel).wrap()]).wrap()
}
#[derive(Debug, Clone)]
pub struct InvalidString(OsString);
impl InertAtomic for InvalidString {
fn type_str() -> &'static str { "invalidstring error" }
}
fn os_str_cls(str: OsString) -> Result<Clause, Clause> {
(str.into_string())
.map_err(|e| InvalidString(e).atom_cls())
.map(|s| OrcString::from(s).cls())
}
define_fn! {
pub OpenFileRead = |x| Ok(init_cps(3, ReadFile(x.downcast()?)))
pub IsInvalidString = |x| {
Ok(Boolean(x.downcast::<InvalidString>().is_ok()).atom_cls())
};
pub OpenFileRead = |x| Ok(init_cps(3, ReadFileCmd(x.downcast()?)));
pub ReadDir = |x| Ok(init_cps(3, ReadDirCmd(x.downcast()?)));
pub OpenFileWrite = |x| {
Ok(init_cps(3, WriteFile{ name: x.downcast()?, append: false }))
};
pub OpenFileAppend = |x| {
Ok(init_cps(3, WriteFile{ name: x.downcast()?, append: true }))
};
pub JoinPaths { root: OrcString, sub: OrcString } => {
let res = Path::new(root.as_str())
.join(sub.as_str())
.into_os_string();
os_str_cls(res.clone()).map_err(|_| RuntimeError::ext(
format!("result {res:?} contains illegal characters"),
"joining paths"
))
};
pub PopPath = |x| {
eprintln!("argument is {x}");
let arg = x.downcast::<OrcString>()?;
let full_path = Path::new(arg.as_str());
let parent = unwrap_or! {full_path.parent(); {
return Ok(orchid_opt(None))
}};
let sub = unwrap_or! {full_path.file_name(); {
return Ok(orchid_opt(None))
}};
Ok(orchid_opt(Some(tuple(
[parent.as_os_str(), sub]
.into_iter()
.map(|s| os_str_cls(s.to_owned()).map_err(|_| RuntimeError::ext(
format!("Result {s:?} contains illegal characters"),
"splitting a path"
)))
.map_ok(Clause::wrap)
.collect::<Result<Vec<_>, _>>()?
).wrap())))
}
}
/// A rudimentary system to read and write files.
#[derive(Clone)]
pub struct DirectFS {
scheduler: SeqScheduler,
}
impl DirectFS {
/// Create a new instance of the system.
pub fn new(scheduler: SeqScheduler) -> Self { Self { scheduler } }
}
impl IntoSystem<'static> for DirectFS {
fn into_system(self, i: &crate::Interner) -> System<'static> {
let mut handlers = HandlerTable::new();
let sched = self.scheduler.clone();
handlers.register(move |cmd| Ok(read_file(&sched, *cmd)));
let sched = self.scheduler.clone();
handlers.register(move |cmd| Ok(read_dir(&sched, *cmd)));
let sched = self.scheduler;
handlers.register(move |cmd| Ok(write_file(&sched, *cmd)));
System {
name: ["system", "directfs"].into_iter().map_into().collect(),
code: HashMap::new(),
prelude: Vec::new(),
constants: ConstTree::namespace(
[i.i("system"), i.i("directfs")],
ConstTree::tree([
(i.i("is_invalid_string"), ConstTree::xfn(IsInvalidString)),
(i.i("readfile"), ConstTree::xfn(OpenFileRead)),
(i.i("readdir"), ConstTree::xfn(ReadDir)),
(i.i("writefile"), ConstTree::xfn(OpenFileWrite)),
(i.i("appendfile"), ConstTree::xfn(OpenFileAppend)),
(i.i("join_paths"), ConstTree::xfn(JoinPaths)),
(i.i("pop_path"), ConstTree::xfn(PopPath)),
]),
)
.unwrap_tree(),
handlers,
}
}
}
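
For reference, the std-only core of the operations above, stripped of the scheduler and CPS wrapping (the helper names are illustrative): the directory listing collects each entry's name plus an is-directory flag, and the path constants reduce to Path::join, parent and file_name.

use std::ffi::OsString;
use std::path::{Path, PathBuf};

// What the read_dir closure computes before conversion into Orchid tuples:
// (file name, is_directory) per entry; the first io::Error short-circuits.
fn list_dir(name: &str) -> std::io::Result<Vec<(OsString, bool)>> {
    Path::new(name)
        .read_dir()?
        .map(|r| r.and_then(|e| Ok((e.file_name(), e.file_type()?.is_dir()))))
        .collect()
}

// join_paths and pop_path without the OsString validity checks.
fn join(root: &str, sub: &str) -> PathBuf { Path::new(root).join(sub) }

fn pop(full: &str) -> Option<(PathBuf, OsString)> {
    let p = Path::new(full);
    Some((p.parent()?.to_path_buf(), p.file_name()?.to_owned()))
}

fn main() {
    assert_eq!(join("a/b", "c.orc"), PathBuf::from("a/b/c.orc"));
    let split = pop("a/b/c.orc");
    assert_eq!(split, Some((PathBuf::from("a/b"), OsString::from("c.orc"))));
    println!("{:?}", list_dir("."));
}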

View File

@@ -1,2 +1,5 @@
//! A rudimentary system exposing methods for Orchid to interact with the file
//! system. All paths are strings.
mod commands;
pub use commands::DirectFS;

View File

@@ -1,10 +1,11 @@
use super::flow::IOCmdHandlePack;
use super::instances::{
BRead, ReadCmd, SRead, SinkHandle, SourceHandle, WriteCmd,
BRead, ReadCmd, SRead, WriteCmd, Sink, Source,
};
use crate::foreign::cps_box::init_cps;
use crate::foreign::{Atom, Atomic};
use crate::representations::OrcString;
use crate::systems::scheduler::SharedHandle;
use crate::systems::stl::Binary;
use crate::systems::RuntimeError;
use crate::{ast, define_fn, ConstTree, Interner, Primitive};
@@ -22,17 +23,13 @@ define_fn! {
cmd: ReadCmd::RBytes(BRead::All),
handle: x.downcast()?
}));
ReadBytes {
stream: SourceHandle,
n: u64
} => Ok(init_cps(3, IOCmdHandlePack{
cmd: ReadCmd::RBytes(BRead::N(n.try_into().unwrap())),
handle: stream.clone()
}));
ReadUntil {
stream: SourceHandle,
pattern: u64
} => {
ReadBytes { stream: SharedHandle<Source>, n: u64 } => {
Ok(init_cps(3, IOCmdHandlePack{
cmd: ReadCmd::RBytes(BRead::N(n.try_into().unwrap())),
handle: stream.clone()
}))
};
ReadUntil { stream: SharedHandle<Source>, pattern: u64 } => {
let delim = pattern.try_into().map_err(|_| RuntimeError::ext(
"greater than 255".to_string(),
"converting number to byte"
@@ -42,20 +39,18 @@ define_fn! {
handle: stream
}))
};
WriteStr {
stream: SinkHandle,
string: OrcString
} => Ok(init_cps(3, IOCmdHandlePack {
cmd: WriteCmd::WStr(string.get_string()),
handle: stream.clone(),
}));
WriteBin {
stream: SinkHandle,
bytes: Binary
} => Ok(init_cps(3, IOCmdHandlePack {
cmd: WriteCmd::WBytes(bytes),
handle: stream.clone(),
}));
WriteStr { stream: SharedHandle<Sink>, string: OrcString } => {
Ok(init_cps(3, IOCmdHandlePack {
cmd: WriteCmd::WStr(string.get_string()),
handle: stream.clone(),
}))
};
WriteBin { stream: SharedHandle<Sink>, bytes: Binary } => {
Ok(init_cps(3, IOCmdHandlePack {
cmd: WriteCmd::WBytes(bytes),
handle: stream.clone(),
}))
};
Flush = |x| Ok(init_cps(3, IOCmdHandlePack {
cmd: WriteCmd::Flush,
handle: x.downcast()?

View File

@@ -9,12 +9,11 @@ use crate::systems::scheduler::{Canceller, SharedHandle};
use crate::systems::stl::Binary;
use crate::Literal;
/// Any type that we can read controlled amounts of data from
pub type Source = BufReader<Box<dyn Read + Send>>;
/// Any type that we can write data to
pub type Sink = Box<dyn Write + Send>;
pub type SourceHandle = SharedHandle<Source>;
pub type SinkHandle = SharedHandle<Sink>;
/// String reading command
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum SRead {
@@ -39,7 +38,7 @@ pub enum ReadCmd {
impl IOCmd for ReadCmd {
type Stream = Source;
type Result = ReadResult;
type Handle = SourceHandle;
type Handle = SharedHandle<Source>;
// This is a buggy rule, check manually
#[allow(clippy::read_zero_byte_vec)]
@@ -82,22 +81,21 @@ impl ReadResult {
pub fn dispatch(self, succ: ExprInst, fail: ExprInst) -> Vec<ExprInst> {
match self {
ReadResult::RBin(_, Err(e)) | ReadResult::RStr(_, Err(e)) => {
vec![call(fail, vec![wrap_io_error(e)]).wrap()]
vec![call(fail, [wrap_io_error(e)]).wrap()]
},
ReadResult::RBin(_, Ok(bytes)) => {
let arg = Binary(Arc::new(bytes)).atom_cls().wrap();
vec![call(succ, vec![arg]).wrap()]
vec![call(succ, [arg]).wrap()]
},
ReadResult::RStr(_, Ok(text)) => {
vec![call(succ, vec![Literal::Str(text.into()).into()]).wrap()]
vec![call(succ, [Literal::Str(text.into()).into()]).wrap()]
},
}
}
}
/// Placeholder function for an eventual conversion from [io::Error] to Orchid
/// data
fn wrap_io_error(_e: io::Error) -> ExprInst { Literal::Uint(0u64).into() }
/// Function to convert [io::Error] to Orchid data
pub fn wrap_io_error(_e: io::Error) -> ExprInst { Literal::Uint(0u64).into() }
/// Writing command (string or binary)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -109,7 +107,7 @@ pub enum WriteCmd {
impl IOCmd for WriteCmd {
type Stream = Sink;
type Handle = SinkHandle;
type Handle = SharedHandle<Sink>;
type Result = WriteResult;
fn execute(
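
A minimal standalone sketch of how a concrete file ends up behind the Source and Sink aliases above; the open_* helpers and the file name are illustrative, not part of the crate.

use std::fs::File;
use std::io::{BufReader, Read, Write};

// Same shape as the aliases above: a buffered, type-erased reader and a
// type-erased writer, both Send so the scheduler can move them between threads.
type Source = BufReader<Box<dyn Read + Send>>;
type Sink = Box<dyn Write + Send>;

fn open_source(path: &str) -> std::io::Result<Source> {
    Ok(BufReader::new(Box::new(File::open(path)?) as Box<dyn Read + Send>))
}

fn open_sink(path: &str) -> std::io::Result<Sink> {
    Ok(Box::new(File::create(path)?) as Box<dyn Write + Send>)
}

fn main() -> std::io::Result<()> {
    let mut sink = open_sink("example.txt")?;
    sink.write_all(b"hello")?;
    sink.flush()?;
    let mut text = String::new();
    open_source("example.txt")?.read_to_string(&mut text)?;
    assert_eq!(text, "hello");
    Ok(())
}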

View File

@@ -9,3 +9,4 @@ mod service;
// pub use facade::{io_system, IOStream, IOSystem};
pub use service::{Service, Stream, StreamTable};
pub use instances::{wrap_io_error, Source, Sink};

View File

@@ -1,6 +1,7 @@
#[allow(unused)] // for doc
use std::io::{BufReader, Read, Write};
use itertools::Itertools;
use rust_embed::RustEmbed;
use trait_set::trait_set;
@@ -69,8 +70,8 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
|stream| (stream, Vec::new()),
);
match result {
Ok(cancel) => Ok(call(tail, vec![init_cps(1, cancel).wrap()]).wrap()),
Err(e) => Ok(call(fail, vec![e.atom_exi()]).wrap()),
Ok(cancel) => Ok(call(tail, [init_cps(1, cancel).wrap()]).wrap()),
Err(e) => Ok(call(fail, [e.atom_exi()]).wrap()),
}
});
let scheduler = self.scheduler.clone();
@@ -87,8 +88,8 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
|stream| (stream, Vec::new()),
);
match result {
Ok(cancel) => Ok(call(tail, vec![init_cps(1, cancel).wrap()]).wrap()),
Err(e) => Ok(call(fail, vec![e.atom_exi()]).wrap()),
Ok(cancel) => Ok(call(tail, [init_cps(1, cancel).wrap()]).wrap()),
Err(e) => Ok(call(fail, [e.atom_exi()]).wrap()),
}
});
let streams = self.global_streams.into_iter().map(|(n, stream)| {
@@ -101,7 +102,7 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
});
System {
handlers,
name: vec!["system".to_string(), "io".to_string()],
name: ["system", "io"].into_iter().map_into().collect(),
constants: io_bindings(i, streams).unwrap_tree(),
code: embed_to_map::<IOEmbed>(".orc", i),
prelude: vec![FileEntry {

View File

@@ -3,7 +3,7 @@ mod assertion_error;
pub mod asynch;
pub mod cast_exprinst;
pub mod codegen;
// mod directfs;
pub mod directfs;
pub mod io;
mod runtime_error;
pub mod scheduler;

View File

@@ -1,14 +1,9 @@
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use crate::foreign::InertAtomic;
/// A single-fire thread-safe boolean flag with relaxed ordering
#[derive(Debug, Clone)]
pub struct Canceller(Arc<AtomicBool>);
impl InertAtomic for Canceller {
fn type_str() -> &'static str { "a canceller" }
}
impl Canceller {
/// Create a new canceller
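
A minimal sketch of the flag described above; only new appears in this hunk, so the cancel and is_cancelled method names are assumptions for illustration.

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

// A cloneable single-fire flag: every clone shares one AtomicBool and all
// accesses use Relaxed ordering, as the doc comment states.
#[derive(Debug, Clone, Default)]
pub struct Canceller(Arc<AtomicBool>);

impl Canceller {
    /// Create a new canceller
    pub fn new() -> Self { Self::default() }
    /// Trip the flag (hypothetical name)
    pub fn cancel(&self) { self.0.store(true, Ordering::Relaxed) }
    /// Check whether the flag was tripped (hypothetical name)
    pub fn is_cancelled(&self) -> bool { self.0.load(Ordering::Relaxed) }
}

fn main() {
    let c = Canceller::new();
    let worker_copy = c.clone();
    c.cancel();
    assert!(worker_copy.is_cancelled()); // clones observe the shared flag
}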

View File

@@ -223,6 +223,26 @@ impl SeqScheduler {
})
}
/// Run an operation asynchronously and then process its result on the main thread,
/// without queuing on any particular data.
pub fn run_orphan<T: Send + 'static>(
&self,
operation: impl FnOnce(Canceller) -> T + Send + 'static,
handler: impl FnOnce(T, Canceller) -> Vec<ExprInst> + 'static,
) -> Canceller {
let cancelled = Canceller::new();
let canc1 = cancelled.clone();
let opid = self.0.pending.borrow_mut().insert(Box::new(|data, _| {
handler(*data.downcast().expect("This is associated by ID"), canc1)
}));
let canc1 = cancelled.clone();
let mut port = self.0.port.clone();
self.0.pool.submit(Box::new(move || {
port.send(SyncReply { opid, data: Box::new(operation(canc1)) });
}));
cancelled
}
/// Schedule a function that will consume the value. After this the handle is
/// considered sealed and all [SeqScheduler::schedule] calls will fail.
pub fn seal<T>(
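
A deliberately simplified, self-contained analogue of the handoff run_orphan performs (no scheduler, no cancellation, and it blocks where the real version parks the handler in the opid-keyed pending table until the SyncReply arrives on the port):

use std::sync::mpsc;
use std::thread;

// Hypothetical stand-in for the pool + port + pending-table machinery.
fn run_orphan_analogue<T, R>(
    operation: impl FnOnce() -> T + Send + 'static,
    handler: impl FnOnce(T) -> R,
) -> R
where
    T: Send + 'static,
{
    let (port, main_loop) = mpsc::channel();
    // worker thread: run the (possibly blocking) operation, send the result back
    thread::spawn(move || {
        let _ = port.send(operation());
    });
    // "main thread": receive the result and hand it to the handler
    handler(main_loop.recv().expect("worker thread panicked"))
}

fn main() {
    let len = run_orphan_analogue(
        || std::fs::read_to_string("Cargo.toml").map(|s| s.len()),
        |res| res.unwrap_or(0),
    );
    println!("read {len} bytes");
}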

View File

@@ -125,7 +125,7 @@ expr=x in
)?
}
let (asl, bsl) = bin.0.split_at(i as usize);
Ok(tuple(vec![
Ok(tuple([
Binary(Arc::new(asl.to_vec())).atom_cls().into(),
Binary(Arc::new(bsl.to_vec())).atom_cls().into(),
]))

View File

@@ -1,13 +1,17 @@
import super::(option, fn::*, proc::*, loop::*, bool::*, known::*, num::*)
import super::(option, fn::*, proc::*, loop::*, bool::*, known::*, num::*, tuple::*)
const pair := \a.\b. \f. f a b
-- Constructors
export const cons := \hd.\tl. option::some (pair hd tl)
export const cons := \hd.\tl. option::some t[hd, tl]
export const end := option::none
export const pop := \list.\default.\f.list default \cons.cons f
export const pop := \list.\default.\f. do{
cps tuple = list default;
cps head, tail = tuple;
f head tail
}
-- Operators
@@ -100,6 +104,25 @@ export const get := \list.\n. (
}
)
--[
Map every element to a pair of the index and the original element
]--
export const enumerate := \list. (
recursive r (list, n = 0)
pop list end \head.\tail.
cons t[n, head] $ r tail $ n + 1
)
--[
Turn a list of CPS commands into a sequence: each command is executed, then the
rest of the list is processed the same way, and the continuation runs after the
very last element.
]--
export const chain := \list.\cont. loop_over (list) {
cps head, list = pop list cont;
cps head;
}
macro new[...$item, ...$rest:1] =0x2p84=> (cons (...$item) new[...$rest])
macro new[...$end] =0x1p84=> (cons (...$end) end)
macro new[] =0x1p84=> end
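
As a plain-Rust mirror of the list encoding used above (types purely illustrative): a list is either end or a t[head, tail] pair wrapped in an option, and enumerate threads an index through the recursion just like recursive r (list, n = 0).

// end        ~ List::End
// cons hd tl ~ List::Cons(hd, Box::new(tl))
enum List<T> {
    End,
    Cons(T, Box<List<T>>),
}

// Pair every element with its index, starting the counter at 0.
fn enumerate<T>(list: List<T>, n: u64) -> List<(u64, T)> {
    match list {
        List::End => List::End,
        List::Cons(head, tail) =>
            List::Cons((n, head), Box::new(enumerate(*tail, n + 1))),
    }
}

fn main() {
    let l = List::Cons("a", Box::new(List::Cons("b", Box::new(List::End))));
    match enumerate(l, 0) {
        List::Cons((i, x), _) => assert_eq!((i, x), (0, "a")),
        List::End => unreachable!(),
    }
}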

View File

@@ -1,7 +1,7 @@
export operators[ + - * % / ]
macro ...$a + ...$b =0x2p36=> (add (...$a) (...$b))
macro ...$a - ...$b:1 =0x2p36=> (subtract (...$a) (...$b))
macro ...$a:1 - ...$b =0x2p36=> (subtract (...$a) (...$b))
macro ...$a * ...$b =0x1p36=> (multiply (...$a) (...$b))
macro ...$a % ...$b:1 =0x1p36=> (remainder (...$a) (...$b))
macro ...$a / ...$b:1 =0x1p36=> (divide (...$a) (...$b))
macro ...$a:1 % ...$b =0x1p36=> (remainder (...$a) (...$b))
macro ...$a:1 / ...$b =0x1p36=> (divide (...$a) (...$b))

View File

@@ -6,10 +6,13 @@ import std::bool::*
export ::([==], if, then, else, true, false)
import std::fn::*
export ::([$ |> =>], identity, pass, pass2, return)
import std::tuple::*
export ::(t)
import std::tuple
import std::list
import std::map
import std::option
export ::(list, map, option)
export ::(tuple, list, map, option)
import std::loop::*
export ::(loop_over, recursive)

View File

@@ -65,7 +65,7 @@ expr=x in
let mut graphs = s.as_str().graphemes(true);
let a = graphs.by_ref().take(i as usize).collect::<String>();
let b = graphs.collect::<String>();
Ok(tuple(vec![a.into(), b.into()]))
Ok(tuple([a.into(), b.into()]))
}
}

src/systems/stl/tuple.orc (new file, 16 lines)
View File

@@ -0,0 +1,16 @@
import super::(known::*, bool::*, num::*)
const discard_args := \n.\value. (
if n == 0 then value
else \_. discard_args (n - 1) value
)
export const pick := \tuple. \i.\n. tuple (
discard_args i \val. discard_args (n - 1 - i) val
)
macro t[...$item, ...$rest:1] =0x2p84=> (\f. t[...$rest] (f (...$item)))
macro t[...$end] =0x1p84=> (\f. f (...$end))
macro t[] =0x1p84=> \f.f
export ::(t)
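
A plain-Rust illustration of what pick does to a Church-encoded tuple, fixed at three elements for simplicity (all names illustrative): the tuple hands its elements to whatever callback it receives, and pick supplies a callback that discards everything but element i.

fn main() {
    // t[10, 20, 30] ~ a function that feeds 10, 20, 30 to its callback.
    let triple = |f: &dyn Fn(i32, i32, i32) -> i32| f(10, 20, 30);

    // pick triple 1 3: discard one leading and one trailing argument,
    // keep the value in between.
    let keep_middle: &dyn Fn(i32, i32, i32) -> i32 = &|_, kept, _| kept;
    assert_eq!(triple(keep_middle), 20);
}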