From 88b6c8adf1f589b42178b3aedb1ab931abddca36 Mon Sep 17 00:00:00 2001 From: Axel PASCON Date: Sat, 19 Jul 2025 18:11:51 +0200 Subject: [PATCH 1/3] utility code and modifications --- eager2-core/Cargo.toml | 2 +- eager2-core/build.rs | 201 ++++++++++++++++++++++++++++++++ eager2-core/src/consts.rs | 21 ++++ eager2-core/src/lib.rs | 26 ++++- eager2-core/src/utils.rs | 237 +++++++++++++++++++++++++++++++++++++- eager2/build.rs | 201 ++++++++++++++++++++++++++++++++ 6 files changed, 685 insertions(+), 3 deletions(-) create mode 100644 eager2-core/build.rs create mode 100644 eager2/build.rs diff --git a/eager2-core/Cargo.toml b/eager2-core/Cargo.toml index ca7e7ab..f7987d0 100644 --- a/eager2-core/Cargo.toml +++ b/eager2-core/Cargo.toml @@ -19,4 +19,4 @@ proc-macro2 = { version = "1", optional = true } quote = { version = "1", optional = true } [features] -"testing" = ["proc-macro2", "quote"] +testing = ["dep:proc-macro2", "dep:quote"] \ No newline at end of file diff --git a/eager2-core/build.rs b/eager2-core/build.rs new file mode 100644 index 0000000..2333f5f --- /dev/null +++ b/eager2-core/build.rs @@ -0,0 +1,201 @@ +use std::{ + borrow::Cow, + collections::VecDeque, + env, + error::Error as StdError, + fmt::{self, Debug, Formatter, Write}, + ops::{Deref, DerefMut}, + process::{Command, ExitCode, Termination}, + rc::Rc, +}; + +#[repr(transparent)] +struct Error(Rc); + +impl From for Error { + fn from(value: T) -> Self { + Self(Rc::new(value)) + } +} + +impl Deref for Error { + type Target = Rc; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for Error { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Debug for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_char('\n')?; + let mut errors = VecDeque::from([&*self.0]); + while let Some(src) = self.0.source() { + errors.push_front(src); + } + for (i, err) in errors.iter().enumerate() { + writeln!(f, "\t#{i}: {err}")?; + } + Ok(()) + } +} + 
+#[derive(Debug)] +struct ErrString(Cow<'static, str>); + +impl StdError for ErrString {} + +impl fmt::Display for ErrString { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(self.0.to_string().as_str()) + } +} + +#[derive(PartialEq, Eq, PartialOrd, Ord)] +enum RustcChannel { + Stable, + Beta, + Nightly, +} + +impl fmt::Display for RustcChannel { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(match self { + Self::Stable => "stable", + Self::Beta => "beta", + Self::Nightly => "nightly", + }) + } +} + +#[derive(PartialEq, Eq, PartialOrd, Ord)] +struct RustcVersion { + major: u32, + minor: u32, + patch: u32, + channel: RustcChannel, +} + +impl TryFrom<(&str, &str, &str, &str)> for RustcVersion { + type Error = Error; + fn try_from(value: (&str, &str, &str, &str)) -> Result { + Ok(Self { + major: value.0.parse()?, + minor: value.1.parse()?, + patch: value.2.parse()?, + channel: match value.3.to_lowercase().as_str() { + "stable" => Ok(RustcChannel::Stable), + "beta" => Ok(RustcChannel::Beta), + "nightly" => Ok(RustcChannel::Nightly), + s => Err(ErrString(format!("unknown rust channel: {s}").into())), + }?, + }) + } +} + +impl fmt::Display for RustcVersion { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!( + f, + "rustc {}.{}.{}-{}", + self.major, self.minor, self.patch, self.channel + ) + } +} + +fn main() -> Result { + let rustc = env::var("RUSTC")?; + let output = Command::new(rustc).arg("--version").output()?; + if !output.status.success() { + return Ok(ExitCode::from( + output + .status + .code() + .expect("could not retrieve status code") as u8, + )); + } + let stdout = output + .stdout + .trim_ascii_start() + .strip_prefix(b"rustc ") + .expect("rustc version output does not start with \"rustc\""); + + let next_sep = stdout + .iter() + .position(|byte| *byte == b'.') + .expect("could not retrieve rustc major version"); + let maj = &stdout[0..next_sep]; + let stdout = unsafe { + stdout + 
.strip_prefix(maj) + .unwrap_unchecked() + .strip_prefix(b".") + .unwrap_unchecked() + }; + + let next_sep = stdout + .iter() + .position(|byte| *byte == b'.') + .expect("could not retrieve rustc minor version"); + let min = &stdout[0..next_sep]; + let stdout = unsafe { + stdout + .strip_prefix(min) + .unwrap_unchecked() + .strip_prefix(b".") + .unwrap_unchecked() + }; + + let mut have_channel_info = false; + let next_sep = stdout + .iter() + .position(|byte| { + if *byte == b'-' { + have_channel_info = true; + } + byte.is_ascii_whitespace() || *byte == b'-' + }) + .expect("could not retrieve rustc major version"); + let patch = &stdout[0..next_sep]; + let stdout = unsafe { + stdout + .strip_prefix(patch) + .unwrap_unchecked() + .strip_prefix(if have_channel_info { + b"-".as_slice() + } else { + b"" + }) + .unwrap_unchecked() + .trim_ascii_start() + }; + + let channel = if have_channel_info { + stdout + .iter() + .take_while(|byte| !byte.is_ascii_whitespace()) + .copied() + .collect() + } else { + b"stable".to_vec() + }; + + let rustversion: RustcVersion = ( + str::from_utf8(maj)?, + str::from_utf8(min)?, + str::from_utf8(patch)?, + str::from_utf8(&channel)?, + ) + .try_into()?; + + println!("cargo::rustc-check-cfg=cfg(rustchan, values(\"stable\", \"beta\", \"nightly\"))"); + println!("cargo::rustc-cfg=rustchan=\"{}\"", rustversion.channel); + + Ok(ExitCode::SUCCESS) +} diff --git a/eager2-core/src/consts.rs b/eager2-core/src/consts.rs index 9dfe925..73d6720 100644 --- a/eager2-core/src/consts.rs +++ b/eager2-core/src/consts.rs @@ -8,15 +8,36 @@ pub const LAZY_SIGIL: &str = "𓆉"; pub const EAGER_SIGIL: &str = "𓂺"; #[must_use] +#[inline(always)] pub fn get_eager_2_ident() -> Ident { Ident::new(EAGER2_IDENT, Span::call_site()) } #[must_use] +#[inline(always)] +pub(crate) fn get_eager_2_pm_ident(suffix: Option<&str>) -> Ident { + Ident::new( + format!("{EAGER2_IDENT}{s}", s = suffix.unwrap_or_default()).as_str(), + Span::call_site(), + ) +} + +#[must_use] 
+#[inline(always)] pub fn eager_call_sigil() -> Literal { Literal::from_str(EAGER_CALL_SIGIL).unwrap() } +#[must_use] +#[inline(always)] +pub(crate) fn eager_call_sigil_proc_macro() -> TokenStream { + use crate::interpolate; + + interpolate! { + ::proc_macro::Literal::from_str(#EAGER_CALL_SIGIL).unwrap() + } +} + #[must_use] pub fn crate_path() -> TokenStream { let mut tokens = TokenStream::new(); diff --git a/eager2-core/src/lib.rs b/eager2-core/src/lib.rs index eae5a6e..cec18f1 100644 --- a/eager2-core/src/lib.rs +++ b/eager2-core/src/lib.rs @@ -1,5 +1,7 @@ -//! This library is not meant for public consumption +#![cfg_attr(rustchan = "nightly", feature(proc_macro_span))] +#![recursion_limit = "1024"] +//! This library is not meant for public consumption #![doc(hidden)] use std::borrow::Cow; @@ -52,6 +54,28 @@ pub mod pm { tokens.extend([self.clone()]); } } + impl ToTokens for TokenStream { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend([self.clone()]); + } + } + impl ToTokens for &str { + fn to_tokens(&self, tokens: &mut TokenStream) { + Literal::string(self).to_tokens(tokens); + } + } + impl ToTokens for [T; N] { + fn to_tokens(&self, tokens: &mut TokenStream) { + for item in self { + item.to_tokens(tokens); + } + } + } + impl ToTokens for &T { + fn to_tokens(&self, tokens: &mut TokenStream) { + (*self).to_tokens(tokens); + } + } } #[cfg(feature = "testing")] diff --git a/eager2-core/src/utils.rs b/eager2-core/src/utils.rs index 04d5259..9e30265 100644 --- a/eager2-core/src/utils.rs +++ b/eager2-core/src/utils.rs @@ -1,4 +1,7 @@ -use crate::pm::{Delimiter, Group, Literal, ToTokens, TokenStream}; +use crate::{ + parse::IdentOrString, + pm::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, ToTokens, TokenStream}, +}; pub trait NextOr: Iterator { fn next_or(&mut self, e: E) -> Result; } @@ -29,6 +32,22 @@ where } } +#[must_use] +pub(crate) fn lifetime(name: IdentOrString) -> TokenStream { + let mut stream = TokenStream::new(); + 
Punct::new('\'', Spacing::Joint).to_tokens(&mut stream); + Ident::new( + match name { + IdentOrString::Ident(i) => i.to_string(), + IdentOrString::String(s) => s, + } + .as_str(), + Span::call_site(), + ) + .to_tokens(&mut stream); + stream +} + #[must_use] pub fn eager_data( eager_call_sigil: &Literal, @@ -41,3 +60,219 @@ pub fn eager_data( tokens.extend(tail); tokens } + +/// Roughly the same as `quote::quote!`, but faster, without repetitions, +/// and working directly with proc_macro. There might exist some valid Rust +/// code which could not be recognised by this macro, but it's enough for +/// our use case +#[macro_export] +macro_rules! interpolate { + {} => { + ::proc_macro::TokenStream::new() + }; + {$($tokens:tt)+} => {{ + let mut interpolated = $crate::interpolate! {}; + interpolated.extend( + $crate::interpolate_inner! { + @already_interpolated[::proc_macro::TokenStream::new()] + $($tokens)+ + } + ); + interpolated + }}; +} + +#[macro_export] +macro_rules! interpolate_inner { + {@already_interpolated[$($interpolated:tt)*]} => {[$($interpolated)*]}; + + // interpolated + {@already_interpolated[$($interpolated:tt)*] #$ident:ident $($rest:tt)*} => { + $crate::interpolate_inner! { + @already_interpolated[ + $($interpolated)*, + $crate::pm::ToTokens::to_token_stream( + &$ident + ) + ] + $($rest)* + } + }; + + // ident + {@already_interpolated[$($interpolated:tt)*] $ident:ident $($rest:tt)*} => { + $crate::interpolate_inner! { + @already_interpolated[ + $($interpolated)*, + $crate::pm::ToTokens::into_token_stream( + ::proc_macro::Ident::new( + stringify!($ident), + ::proc_macro::Span::call_site() + ) + ) + ] + $($rest)* + } + }; + {@already_interpolated[$($interpolated:tt)*] _ $($rest:tt)*} => { + $crate::interpolate_inner! 
{ + @already_interpolated[ + $($interpolated)*, + $crate::pm::ToTokens::into_token_stream( + ::proc_macro::Ident::new( + "_", + ::proc_macro::Span::call_site() + ) + ) + ] + $($rest)* + } + }; + + // literal + {@already_interpolated[$($interpolated:tt)*] $lit:literal $($rest:tt)*} => { + $crate::interpolate_inner! { + @already_interpolated[ + $($interpolated)*, + { + use ::core::str::FromStr; + + $crate::pm::ToTokens::into_token_stream( + ::proc_macro::Literal::from_str( + stringify!($lit) + ).expect( + concat!( + "`interpolate!` could not create a literal with ", + stringify!($lit) + ) + ) + ) + } + ] + + $($rest)* + } + }; + + // group + {@already_interpolated[$($interpolated:tt)*] { $($tokens:tt)* } $($rest:tt)*} => { + $crate::interpolate_inner! { + @already_interpolated[ + $($interpolated)*, + $crate::pm::ToTokens::into_token_stream( + ::proc_macro::Group::new( + ::proc_macro::Delimiter::Brace, + $crate::interpolate! { $($tokens)* } + ) + ) + ] + $($rest)* + } + }; + {@already_interpolated[$($interpolated:tt)*] [ $($tokens:tt)* ] $($rest:tt)*} => { + $crate::interpolate_inner! { + @already_interpolated[ + $($interpolated)*, + $crate::pm::ToTokens::into_token_stream( + ::proc_macro::Group::new( + ::proc_macro::Delimiter::Bracket, + $crate::interpolate! { $($tokens)* } + ) + ) + ] + $($rest)* + } + }; + {@already_interpolated[$($interpolated:tt)*] ( $($tokens:tt)* ) $($rest:tt)*} => { + $crate::interpolate_inner! { + @already_interpolated[ + $($interpolated)*, + $crate::pm::ToTokens::into_token_stream( + ::proc_macro::Group::new( + ::proc_macro::Delimiter::Parenthesis, + $crate::interpolate! { $($tokens)* } + ) + ) + ] + $($rest)* + } + }; + + // special tokens + {@already_interpolated[$($interpolated:tt)*] $lifetime:lifetime $($rest:tt)*} => { + $crate::interpolate_inner! 
{ + @already_interpolated[ + $($interpolated)*, + $crate::pm::ToTokens::into_token_stream( + $crate::utils::quote_lifetime( + stringify!($lifetime) + ) + ) + ] + $($rest)* + } + }; + + // punct (or related) + // this matches a [`Token`](https://doc.rust-lang.org/nightly/reference/tokens.html#grammar-Token), + // which, at this point must be a [`PUNCTUATION`](https://doc.rust-lang.org/nightly/reference/tokens.html#grammar-PUNCTUATION) + {@already_interpolated[$($interpolated:tt)*] $punct:tt $($rest:tt)*} => { + $crate::interpolate_inner! { + @already_interpolated[ + $($interpolated)*, + $crate::pm::ToTokens::into_token_stream( + $crate::utils::quote_punct( + stringify!($punct) + ) + ) + ] + $($rest)* + } + }; +} + +#[allow(dead_code)] +pub(crate) fn quote_lifetime(lifetime: &str) -> TokenStream { + let trimmed = lifetime.trim(); + debug_assert_eq!(trimmed.chars().next(), Some('\'')); + + let mut stream = TokenStream::new(); + + // SAFETY: « ' » needs one byte to be encoded as UTF-8, + // so removing it still yields valid UTF-8 + let name = unsafe { str::from_utf8_unchecked(&lifetime.as_bytes()[1..]) }; + + Punct::new('\'', Spacing::Joint).to_tokens(&mut stream); + Ident::new(name, Span::call_site()).to_tokens(&mut stream); + + stream +} + +#[allow(dead_code)] +pub(crate) fn quote_punct(punct: &str) -> TokenStream { + let trimmed = punct.trim(); + debug_assert!(trimmed.chars().count() <= 3); + + let mut stream = TokenStream::new(); + let mut chars = trimmed.chars().peekable(); + + while let Some(ch) = chars.next() { + if chars.peek().is_some() { + Punct::new(ch, Spacing::Joint).to_tokens(&mut stream); + } else { + Punct::new(ch, Spacing::Alone).to_tokens(&mut stream); + } + } + + stream +} + +#[cfg(rustchan = "nightly")] +pub(crate) fn join_spans(s1: Span, s2: Span) -> Span { + s1.join(s2).unwrap_or(s1) +} + +#[cfg(not(rustchan = "nightly"))] +pub(crate) fn join_spans(s1: Span, _: Span) -> Span { + s1 +} diff --git a/eager2/build.rs b/eager2/build.rs new file mode 
100644 index 0000000..2333f5f --- /dev/null +++ b/eager2/build.rs @@ -0,0 +1,201 @@ +use std::{ + borrow::Cow, + collections::VecDeque, + env, + error::Error as StdError, + fmt::{self, Debug, Formatter, Write}, + ops::{Deref, DerefMut}, + process::{Command, ExitCode, Termination}, + rc::Rc, +}; + +#[repr(transparent)] +struct Error(Rc); + +impl From for Error { + fn from(value: T) -> Self { + Self(Rc::new(value)) + } +} + +impl Deref for Error { + type Target = Rc; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for Error { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Debug for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_char('\n')?; + let mut errors = VecDeque::from([&*self.0]); + while let Some(src) = self.0.source() { + errors.push_front(src); + } + for (i, err) in errors.iter().enumerate() { + writeln!(f, "\t#{i}: {err}")?; + } + Ok(()) + } +} + +#[derive(Debug)] +struct ErrString(Cow<'static, str>); + +impl StdError for ErrString {} + +impl fmt::Display for ErrString { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(self.0.to_string().as_str()) + } +} + +#[derive(PartialEq, Eq, PartialOrd, Ord)] +enum RustcChannel { + Stable, + Beta, + Nightly, +} + +impl fmt::Display for RustcChannel { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(match self { + Self::Stable => "stable", + Self::Beta => "beta", + Self::Nightly => "nightly", + }) + } +} + +#[derive(PartialEq, Eq, PartialOrd, Ord)] +struct RustcVersion { + major: u32, + minor: u32, + patch: u32, + channel: RustcChannel, +} + +impl TryFrom<(&str, &str, &str, &str)> for RustcVersion { + type Error = Error; + fn try_from(value: (&str, &str, &str, &str)) -> Result { + Ok(Self { + major: value.0.parse()?, + minor: value.1.parse()?, + patch: value.2.parse()?, + channel: match value.3.to_lowercase().as_str() { + "stable" => Ok(RustcChannel::Stable), + "beta" => Ok(RustcChannel::Beta), + 
"nightly" => Ok(RustcChannel::Nightly), + s => Err(ErrString(format!("unknown rust channel: {s}").into())), + }?, + }) + } +} + +impl fmt::Display for RustcVersion { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!( + f, + "rustc {}.{}.{}-{}", + self.major, self.minor, self.patch, self.channel + ) + } +} + +fn main() -> Result { + let rustc = env::var("RUSTC")?; + let output = Command::new(rustc).arg("--version").output()?; + if !output.status.success() { + return Ok(ExitCode::from( + output + .status + .code() + .expect("could not retrieve status code") as u8, + )); + } + let stdout = output + .stdout + .trim_ascii_start() + .strip_prefix(b"rustc ") + .expect("rustc version output does not start with \"rustc\""); + + let next_sep = stdout + .iter() + .position(|byte| *byte == b'.') + .expect("could not retrieve rustc major version"); + let maj = &stdout[0..next_sep]; + let stdout = unsafe { + stdout + .strip_prefix(maj) + .unwrap_unchecked() + .strip_prefix(b".") + .unwrap_unchecked() + }; + + let next_sep = stdout + .iter() + .position(|byte| *byte == b'.') + .expect("could not retrieve rustc minor version"); + let min = &stdout[0..next_sep]; + let stdout = unsafe { + stdout + .strip_prefix(min) + .unwrap_unchecked() + .strip_prefix(b".") + .unwrap_unchecked() + }; + + let mut have_channel_info = false; + let next_sep = stdout + .iter() + .position(|byte| { + if *byte == b'-' { + have_channel_info = true; + } + byte.is_ascii_whitespace() || *byte == b'-' + }) + .expect("could not retrieve rustc major version"); + let patch = &stdout[0..next_sep]; + let stdout = unsafe { + stdout + .strip_prefix(patch) + .unwrap_unchecked() + .strip_prefix(if have_channel_info { + b"-".as_slice() + } else { + b"" + }) + .unwrap_unchecked() + .trim_ascii_start() + }; + + let channel = if have_channel_info { + stdout + .iter() + .take_while(|byte| !byte.is_ascii_whitespace()) + .copied() + .collect() + } else { + b"stable".to_vec() + }; + + let rustversion: 
RustcVersion = ( + str::from_utf8(maj)?, + str::from_utf8(min)?, + str::from_utf8(patch)?, + str::from_utf8(&channel)?, + ) + .try_into()?; + + println!("cargo::rustc-check-cfg=cfg(rustchan, values(\"stable\", \"beta\", \"nightly\"))"); + println!("cargo::rustc-cfg=rustchan=\"{}\"", rustversion.channel); + + Ok(ExitCode::SUCCESS) +} From 7f86fe88dc4a2ba7aeee1fccc11d1d49f31863a4 Mon Sep 17 00:00:00 2001 From: Axel PASCON Date: Sat, 19 Jul 2025 18:26:34 +0200 Subject: [PATCH 2/3] rename `trace_macros` feature to `trace-macros` --- eager2/Cargo.toml | 2 +- eager2/src/impls.rs | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/eager2/Cargo.toml b/eager2/Cargo.toml index 06fa082..0b2a7ae 100644 --- a/eager2/Cargo.toml +++ b/eager2/Cargo.toml @@ -21,4 +21,4 @@ litrs = { version = "0.4", default-features = false } eager2-core = { workspace = true } [features] -trace_macros = [] +trace-macros = [] diff --git a/eager2/src/impls.rs b/eager2/src/impls.rs index 21c967a..b4a51ab 100644 --- a/eager2/src/impls.rs +++ b/eager2/src/impls.rs @@ -6,7 +6,7 @@ pub fn eval(stream: TokenStream, eager: bool) -> TokenStream { init(); let _name = if eager { "eager" } else { "lazy" }; - #[cfg(feature = "trace_macros")] + #[cfg(feature = "trace-macros")] println!("{} input: {}", _name, stream); let output = match eager2_core::funcs::mode(stream, eager) { @@ -14,7 +14,7 @@ pub fn eval(stream: TokenStream, eager: bool) -> TokenStream { Err(err) => return err.into_token_stream(), }; - #[cfg(feature = "trace_macros")] + #[cfg(feature = "trace-macros")] println!("{} output: {}", _name, output); output @@ -23,7 +23,7 @@ pub fn eval(stream: TokenStream, eager: bool) -> TokenStream { #[allow(clippy::needless_pass_by_value)] pub fn eager_wrap(stream: TokenStream, name: &str) -> TokenStream { init(); - #[cfg(feature = "trace_macros")] + #[cfg(feature = "trace-macros")] println!("{} input: {}", name, stream); let output = match eager2_core::funcs::eager_wrap(stream, name) { @@ 
-31,7 +31,7 @@ pub fn eager_wrap(stream: TokenStream, name: &str) -> TokenStream { Err(err) => return err.into_token_stream(), }; - #[cfg(feature = "trace_macros")] + #[cfg(feature = "trace-macros")] println!("{} output: {}", name, output); output From cf0827650caab34b10f7288a6c4ef1d512432729 Mon Sep 17 00:00:00 2001 From: Axel PASCON Date: Sat, 19 Jul 2025 18:31:18 +0200 Subject: [PATCH 3/3] processing code and tests --- Cargo.toml | 2 +- dummy/Cargo.toml | 20 ++ dummy/src/lib.rs | 10 + eager2-core/src/rules.rs | 533 ++++++++++++++++++++++++++++++++-- eager2/Cargo.toml | 3 + eager2/src/lib.rs | 13 +- eager2/src/rules.rs | 17 +- eager2/tests/decl_macro_v2.rs | 133 +++++++++ eager2/tests/proc_macro.rs | 34 +++ 9 files changed, 736 insertions(+), 29 deletions(-) create mode 100644 dummy/Cargo.toml create mode 100644 dummy/src/lib.rs create mode 100644 eager2/tests/decl_macro_v2.rs create mode 100644 eager2/tests/proc_macro.rs diff --git a/Cargo.toml b/Cargo.toml index c6bfde7..7427610 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] resolver = "2" -members = ["eager2", "eager2-core"] +members = ["eager2", "eager2-core", "dummy"] [workspace.dependencies] eager2-core = { path = "./eager2-core", version = "1.1.2" } diff --git a/dummy/Cargo.toml b/dummy/Cargo.toml new file mode 100644 index 0000000..993d859 --- /dev/null +++ b/dummy/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "dummy" +version = "1.1.2" +authors = ["Daniel Bloom"] +edition = "2021" +categories = ["development-tools"] +description = "Proc-macros for eager macro expansion" +documentation = "https://docs.rs/eager2" +license = "MIT" +repository = "https://github.com/Daniel-Aaron-Bloom/eager2" +readme = "../README.md" +rust-version = "1.88" + +[lib] +proc-macro = true +path = "src/lib.rs" + +[dependencies] +eager2 = { path = "../eager2" } +eager2-core = { workspace = true } diff --git a/dummy/src/lib.rs b/dummy/src/lib.rs new file mode 100644 index 0000000..26a70cd --- /dev/null +++ 
b/dummy/src/lib.rs @@ -0,0 +1,10 @@ +use eager2::eager_proc_macro; +use proc_macro::{Punct, Spacing, TokenStream, TokenTree}; + +#[eager_proc_macro] +pub fn add(input: TokenStream) -> TokenStream { + let mut res = TokenStream::new(); + res.extend([TokenTree::from(Punct::new('+', Spacing::Alone))]); + res.extend(input.into_iter().collect::>()); + res +} diff --git a/eager2-core/src/rules.rs b/eager2-core/src/rules.rs index 4c1f36a..9ec0230 100644 --- a/eager2-core/src/rules.rs +++ b/eager2-core/src/rules.rs @@ -1,16 +1,174 @@ -use std::iter::Peekable; +use std::{borrow::Cow, iter::Peekable}; use crate::{ - consts::{crate_path, eager_call_sigil, get_eager_2_ident}, - parse::{expect_group, expect_ident, expect_punct, Param}, + consts::{ + crate_path, eager_call_sigil, eager_call_sigil_proc_macro, get_eager_2_ident, + get_eager_2_pm_ident, + }, + interpolate, + parse::{expect_group, expect_ident, expect_punct, IdentOrString, Param}, pm::{ Delimiter, Group, Ident, Literal, Punct, Spacing, Span, ToTokens, TokenStream, TokenTree, }, state::State, - utils::{eager_data, NextOr}, + utils::{eager_data, join_spans, lifetime, NextOr}, Error, }; +#[derive(Clone)] +enum MacroKind { + /// `macro_rules! { ... }` + DeclMacro1, + /// `pub? macro ...` + DeclMacro2 { visibility: TokenStream }, +} + +impl MacroKind { + fn is_v1(&self) -> bool { + matches!(self, Self::DeclMacro1) + } + fn is_v2(&self) -> bool { + matches!(self, Self::DeclMacro2 { .. 
}) + } + + fn vis_mut(&mut self) -> Option<&mut TokenStream> { + match self { + Self::DeclMacro1 => None, + Self::DeclMacro2 { visibility } => Some(visibility), + } + } +} + +struct EagerProcMacro { + eager_sigil: TokenStream, + has_proc_macro_attr: bool, + attributes: TokenStream, + visibility: TokenStream, + name: Ident, + input: Ident, + input_type: TokenStream, + output_type: TokenStream, + body: Group, +} + +impl ToTokens for EagerProcMacro { + fn to_tokens(&self, tokens: &mut TokenStream) { + let EagerProcMacro { + input: user_input, + input_type: user_input_type, + output_type: user_output_type, + eager_sigil, + has_proc_macro_attr, + attributes, + visibility, + name, + body, + } = self; + + let mut maybe_proc_macro_attr = TokenStream::new(); + if !has_proc_macro_attr { + maybe_proc_macro_attr.extend(Punct::new('#', Spacing::Alone).into_token_stream()); + maybe_proc_macro_attr.extend( + Group::new( + Delimiter::Bracket, + Ident::new("proc_macro", Span::call_site()).into_token_stream(), + ) + .into_token_stream(), + ); + } + + let input = get_eager_2_pm_ident(Some("input")); + let user_lambda = get_eager_2_pm_ident(Some("user_lambda")); + let input_copy = get_eager_2_pm_ident(Some("input_copy")); + let iter = get_eager_2_pm_ident(Some("iter")); + let eager_checks = lifetime(IdentOrString::Ident(get_eager_2_pm_ident(Some( + "eager_checks", + )))); + let sigil = get_eager_2_pm_ident(Some("sigil")); + let tmp = get_eager_2_pm_ident(Some("tmp")); + let eager_group = get_eager_2_pm_ident(Some("eager_group")); + let res = get_eager_2_pm_ident(Some("res")); + let tt = get_eager_2_pm_ident(Some("tt")); + tokens.extend([interpolate! 
{ + #maybe_proc_macro_attr + #attributes + #visibility fn #name (#input: ::proc_macro::TokenStream) -> ::proc_macro::TokenStream { + let mut #user_lambda = |#user_input: #user_input_type| -> #user_output_type #body; + let #input_copy = #input.clone(); + #eager_checks: { + use ::core::str::FromStr; + + let #sigil = #eager_sigil; + let mut #iter = #input.into_iter(); + let mut #eager_group = ::proc_macro::TokenStream::new(); + let mut #res = ::proc_macro::TokenStream::new(); + + match #iter.next() + { + Some(ref #tt @ ::proc_macro::TokenTree::Literal(ref #tmp)) if #tmp.to_string() == #sigil.to_string() => { + #eager_group.extend([#tt.clone()]); + }, + _ => break #eager_checks, + } + + match #iter.next() + { + Some(ref #tt @ ::proc_macro::TokenTree::Group(ref #tmp)) if #tmp.delimiter() == ::proc_macro::Delimiter::Bracket => { + #eager_group.extend([#tt.clone()]); + }, + _ => break #eager_checks, + } + + #eager_group.extend([ + Into::<::proc_macro::TokenStream>::into( + #user_lambda( + Into::<#user_input_type>::into( + #iter.collect::<::proc_macro::TokenStream>() + ) + ) + ) + ]); + + #res.extend([ + ::proc_macro::TokenTree::from( + ::proc_macro::Punct::new(':', ::proc_macro::Spacing::Joint) + ), + ::proc_macro::TokenTree::from( + ::proc_macro::Punct::new(':', ::proc_macro::Spacing::Joint) + ), + ::proc_macro::TokenTree::from( + ::proc_macro::Ident::new("eager2", ::proc_macro::Span::call_site()) + ), + ::proc_macro::TokenTree::from( + ::proc_macro::Punct::new(':', ::proc_macro::Spacing::Joint) + ), + ::proc_macro::TokenTree::from( + ::proc_macro::Punct::new(':', ::proc_macro::Spacing::Joint) + ), + ::proc_macro::TokenTree::from( + ::proc_macro::Ident::new("eager", ::proc_macro::Span::call_site()) + ), + ::proc_macro::TokenTree::from( + ::proc_macro::Punct::new('!', ::proc_macro::Spacing::Alone) + ), + ::proc_macro::TokenTree::from( + ::proc_macro::Group::new(::proc_macro::Delimiter::Brace, #eager_group) + ) + ]); + + return #res; + } + + 
Into::<::proc_macro::TokenStream>::into( + #user_lambda( + Into::<#user_input_type>::into(#input_copy) + ) + ) + } + }] as [TokenStream; 1]); + } +} + #[derive(Clone)] struct Rule { grammar: Group, @@ -95,10 +253,10 @@ impl ToTokens for Rule { Punct::new('=', Spacing::Joint).to_tokens(tokens); Punct::new('>', Spacing::Alone).to_tokens(tokens); self.expansion.to_tokens(tokens); - Punct::new(';', Spacing::Alone).to_tokens(tokens); } } pub struct Rules { + kind: MacroKind, metas: Vec, macro_name: Ident, eager: Vec, @@ -107,22 +265,254 @@ pub struct Rules { impl ToTokens for Rules { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.extend(self.metas.iter().cloned()); - Ident::new("macro_rules", Span::call_site()).to_tokens(tokens); - Punct::new('!', Spacing::Alone).to_tokens(tokens); - self.macro_name.to_tokens(tokens); + + let separator = match &self.kind { + MacroKind::DeclMacro1 => { + Ident::new("macro_rules", Span::call_site()).to_tokens(tokens); + Punct::new('!', Spacing::Alone).to_tokens(tokens); + self.macro_name.to_tokens(tokens); + Punct::new(';', Spacing::Alone) + } + MacroKind::DeclMacro2 { visibility } => { + tokens.extend(visibility.clone()); + Ident::new("macro", Span::call_site()).to_tokens(tokens); + self.macro_name.to_tokens(tokens); + Punct::new(',', Spacing::Alone) + } + }; let mut rules = TokenStream::new(); for rule in &self.eager { rule.to_tokens(&mut rules); + separator.to_tokens(&mut rules); } // Put the pure version after so eager is always tried first for rule in &self.pure { rule.to_tokens(&mut rules); + separator.to_tokens(&mut rules); } Group::new(Delimiter::Brace, rules).to_tokens(tokens); } } +fn expand_proc_macro( + span: Span, + eager_call_sigil: &TokenStream, + stream: &mut Peekable<&mut dyn Iterator>, +) -> Result { + macro_rules! err_eoi { + ($s:expr) => { + (get_helpers::().0)(span, $s) + }; + } + macro_rules! err_token { + ($tt:expr, $s:expr) => { + (get_helpers::().1)($tt, span, $s) + }; + } + macro_rules! 
err_token_custom { + ($tt:expr, $err:expr, $note:expr) => { + (get_helpers().2)(Some($tt), span, $err, $note) + }; + ($err:expr, $note:expr) => { + (get_helpers().2)(None, span, $err, $note) + }; + } + + let mut has_proc_macro_attr = false; + let mut attributes = vec![]; + let mut visibility = None; + let mut vis_span: Option = None; + loop { + match stream + .peek() + .ok_or_else(|| err_eoi!("start of proc-macro definition"))? + { + TokenTree::Punct(p) if p.as_char() == '#' => { + let pound = stream.next().unwrap(); + attributes.push(pound); + + let g = stream + .next() + .ok_or_else(|| err_eoi!("proc-macro's attribute"))?; + match g { + ref tt @ TokenTree::Group(ref gr) + if gr.stream().to_string().trim() == "proc_macro" => + { + if has_proc_macro_attr { + continue; + } + has_proc_macro_attr = true; + attributes.push(tt.clone()); + } + other => attributes.push(other), + } + } + tt @ TokenTree::Ident(i) if i.to_string() == "pub" => { + if visibility.is_some() { + return Err(err_token_custom!( + tt.clone(), + "unexpected visibility specifier", + format!( + "one was previously found (at {line}:{col}:{file}): {tt}", + line = unsafe { vis_span.unwrap_unchecked().line() }, + col = unsafe { vis_span.unwrap_unchecked().column() }, + file = unsafe { vis_span.unwrap_unchecked().file() } + ) + )); + } + visibility = Some(TokenStream::new()); + let base_spec; + let ext_spec; + + unsafe { + base_spec = stream.next().unwrap_unchecked(); + vis_span = Some(base_spec.span()); + visibility.as_mut().unwrap_unchecked().extend([base_spec]); + } + + match stream.peek() { + Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Parenthesis => unsafe { + ext_spec = stream.next().unwrap_unchecked(); + vis_span = Some(join_spans( + *vis_span.as_ref().unwrap_unchecked(), + ext_spec.span(), + )); + visibility.as_mut().unwrap_unchecked().extend([ext_spec]); + }, + _ => continue, + } + } + TokenTree::Ident(i) if i.to_string() == "fn" => { + let _ = stream.next(); + break; + } + other => { + 
return Err(err_token_custom!( + other.clone(), + format!("unknown token {other}"), + "while parsing proc-macro signature" + )); + } + } + } + + let name = stream + .next() + .ok_or_else(|| err_eoi!("name of proc-macro")) + .and_then(|tt| match tt { + TokenTree::Ident(i) => Ok(i), + other => Err(err_token!(other, "name of proc-macro")), + })?; + + let mut args = stream + .next() + .ok_or_else(|| err_eoi!("proc-macro arguments")) + .and_then(|tt| match tt { + TokenTree::Group(g) if g.delimiter() == Delimiter::Parenthesis => Ok(g.stream()), + other => Err(err_token!(other, "proc-macro arguments")), + })? + .into_iter(); + let input = args + .next() + .ok_or_else(|| err_eoi!("name of proc-macro input variable")) + .and_then(|tt| match tt { + TokenTree::Ident(i) => Ok(i), + other => Err(err_token!(other, "name of proc-macro input variable")), + })?; + args.next() + .ok_or_else(|| err_eoi!("token `:`")) + .and_then(|tt| match tt { + TokenTree::Punct(p) if p.as_char() == ':' => Ok(()), + other => Err(err_token!(other, "token `:`")), + })?; + let input_type = args.collect(); + + stream + .next() + .ok_or_else(|| err_eoi!("token `-`")) + .and_then(|tt| match tt { + TokenTree::Punct(p) if p.as_char() == '-' => Ok(()), + other => Err(err_token!(other, "token `-`")), + })?; + stream + .next() + .ok_or_else(|| err_eoi!("token `>`")) + .and_then(|tt| match tt { + TokenTree::Punct(p) if p.as_char() == '>' => Ok(()), + other => Err(err_token!(other, "token `>`")), + })?; + + let mut output_type = TokenStream::new(); + while stream + .peek() + .ok_or_else(|| err_eoi!("proc-macro output type")) + .map(|tt| !matches!(tt, TokenTree::Group(g) if g.delimiter() == Delimiter::Brace))? 
+ { + output_type.extend([unsafe { stream.next().unwrap_unchecked() }]); + } + + let body = stream + .next() + .ok_or_else(|| err_eoi!("proc-macro body")) + .and_then(|tt| match tt { + TokenTree::Group(g) if g.delimiter() == Delimiter::Brace => Ok(g), + other => Err(err_token!(other, "proc-macro body")), + })?; + + Ok(EagerProcMacro { + has_proc_macro_attr, + eager_sigil: eager_call_sigil.clone(), + attributes: attributes.into_iter().collect(), + visibility: visibility.unwrap_or_default(), + name, + input, + input_type, + output_type, + body, + }) +} + +#[allow(clippy::type_complexity)] +fn get_helpers>, S2: Into>>() -> ( + impl for<'s> Fn(Span, &'s str) -> Error, + impl for<'s> Fn(TokenTree, Span, &'s str) -> Error, + impl Fn(Option, Span, S1, S2) -> Error, +) { + ( + |span: Span, s: &'_ str| -> Error { + Error { + span, + msg: "unexpected end of input".into(), + note: Some(crate::Note { + span: None, + msg: ("while trying to match ".to_string() + s).into(), + }), + } + }, + |tt: TokenTree, span: Span, s: &'_ str| -> Error { + Error { + span: tt.span(), + msg: format!("unexpected token {tt}").into(), + note: Some(crate::Note { + span: Some(span), + msg: ("while trying to match ".to_string() + s).into(), + }), + } + }, + |tt: Option, span: Span, err: S1, note: S2| -> Error { + Error { + span: tt.map_or(span, |tree| tree.span()), + msg: err.into(), + note: Some(crate::Note { + span: Some(span), + msg: note.into(), + }), + } + }, + ) +} + pub fn expand_rules( span: Span, hidden_ident: &Ident, @@ -131,15 +521,16 @@ pub fn expand_rules( stream: &mut Peekable<&mut dyn Iterator>, ) -> Result { let mut metas = vec![]; + let mut kind = MacroKind::DeclMacro1; loop { match stream.peek() { None => { return Err(Error { span, - msg: "unexpected end of macro invocation".into(), + msg: "unexpected end of input".into(), note: Some(crate::Note { span: None, - msg: "while trying to match token `#` or ident `macro_rules`".into(), + msg: "while trying to match start of macro 
definition".into(), }), }) } @@ -150,20 +541,111 @@ pub fn expand_rules( let g = expect_group(stream.next_or(span), Delimiter::Bracket)?; metas.push(g.into()); } - Some(TokenTree::Ident(i)) if i.to_string() == "macro_rules" => break, - Some(t) => { - return Err(Error { - span: t.span(), - msg: "expected token `#` or or ident `macro_rules`".into(), - note: None, - }) + Some(TokenTree::Ident(i)) if i.to_string() == "macro_rules" => match &kind { + MacroKind::DeclMacro1 => break, + MacroKind::DeclMacro2 { visibility } => { + return Err(Error { + span: i.span(), + msg: "expected ident `macro`".into(), + note: Some(crate::Note { + span: visibility.clone().into_iter().next().map(|tt| tt.span()), + msg: "previously matched a visibility specifier".into(), + }), + }) + } + }, + Some(TokenTree::Ident(i)) if i.to_string() == "macro" => { + if kind.is_v1() { + kind = MacroKind::DeclMacro2 { + visibility: TokenStream::new(), + }; + } + break; + } + Some(TokenTree::Ident(i)) if i.to_string() == "pub" => { + kind = MacroKind::DeclMacro2 { + visibility: TokenStream::new(), + }; + let visibility = kind.vis_mut().unwrap(); + + let p = stream.next().unwrap(); + visibility.extend([p].iter().cloned()); + + match stream.peek() { + Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Parenthesis => { + // TODO: verify that we got valid tokens, e.g. `crate`, `super` or `in ` + let vis_cont = stream.next().unwrap(); + visibility.extend([vis_cont].iter().cloned()); + } + Some(TokenTree::Ident(i)) if i.to_string() == "macro" => { + continue; + } + Some(tt) => { + return Err(Error { + span: tt.span(), + msg: + "expected visibility specifier (e.g. 
`pub(crate)`) or ident `macro`" + .into(), + note: None, + }); + } + None => { + return Err(Error { + span, + msg: "unexpected end of input".into(), + note: Some(crate::Note { + span: None, + msg: "while trying to match start of macro definition".into(), + }), + }) + } + }; } + Some(t) => return Err(Error { + span: t.span(), + msg: + "expected token `#`, ident `macro_rules`, visibility specifier or ident `macro`" + .into(), + note: None, + }), } } - let _macro_rules = stream.next().unwrap(); - expect_punct(stream.next_or(span), '!')?; + let _macro_rules_or_macro = stream.next().unwrap(); + if kind.is_v1() { + expect_punct(stream.next_or(span), '!')?; + } let macro_name = expect_ident(stream.next_or(span), Param::Named("macro_name"))?; + let delim; + if kind.is_v2() { + delim = ','; + if stream.peek().is_some_and( + |tt| matches!(tt, TokenTree::Group(g) if g.delimiter() == Delimiter::Parenthesis), + ) { + let grammar = expect_group( + stream.next().ok_or_else(|| unreachable!()), + Param::Named("grammar"), + )?; + + let expansion = expect_group(stream.next_or(span), Param::Named("expansion"))?; + + let rule = Rule { grammar, expansion }; + + return Ok(Rules { + kind, + metas, + macro_name, + eager: [rule + .clone() + .make_eager(crate_path, eager_call_sigil, hidden_ident)] + .to_vec(), + pure: [rule].to_vec(), + }); + } + } else { + delim = ';'; + } + let group = expect_group(stream.next_or(span), Delimiter::Brace)?; let mut rules = vec![]; @@ -182,7 +664,7 @@ pub fn expand_rules( rules.push(Rule { grammar, expansion }); let Some(tt) = stream.next() else { break }; - expect_punct(Ok(tt), ';')?; + expect_punct(Ok(tt), delim)?; } let eager_rules = rules @@ -193,6 +675,7 @@ pub fn expand_rules( let pure_rules = rules; Ok(Rules { + kind, metas, macro_name, eager: eager_rules, @@ -287,3 +770,13 @@ pub fn eager_macro(attr: TokenStream, stream: TokenStream) -> Result Result { + let eager_call_sigil = eager_call_sigil_proc_macro(); + let span = Span::call_site(); + + let 
stream: &mut dyn Iterator = &mut stream.into_iter(); + + expand_proc_macro(span, &eager_call_sigil, &mut stream.peekable()) + .map(ToTokens::into_token_stream) +} diff --git a/eager2/Cargo.toml b/eager2/Cargo.toml index 0b2a7ae..b0561c5 100644 --- a/eager2/Cargo.toml +++ b/eager2/Cargo.toml @@ -20,5 +20,8 @@ convert_case = "0.8.0" litrs = { version = "0.4", default-features = false } eager2-core = { workspace = true } +[dev-dependencies] +dummy = { path = "../dummy" } + [features] trace-macros = [] diff --git a/eager2/src/lib.rs b/eager2/src/lib.rs index 4f2d42f..6c589d2 100644 --- a/eager2/src/lib.rs +++ b/eager2/src/lib.rs @@ -323,6 +323,13 @@ pub fn eager_macro(attr: TokenStream, stream: TokenStream) -> TokenStream { rules::eager_macro(attr.into(), stream.into()).into() } +/// Declares an [eager!](macro.eager.html)-enabled proc-macro. +#[proc_macro_attribute] +pub fn eager_proc_macro(attr: TokenStream, stream: TokenStream) -> TokenStream { + #[allow(clippy::useless_conversion)] + rules::eager_proc_macro(attr.into(), stream.into()).into() +} + /// [[eager!](macro.eager.html)] Emulates eager expansion of macros. /// /// # Examples @@ -592,8 +599,6 @@ pub fn cfg(stream: TokenStream) -> TokenStream { impls::eager_wrap(stream.into(), "cfg").into() } -/// ๐Ÿšง Not yet implemented! -/// /// [[eager!](macro.eager.html)] Expands to the column number at which it was invoked. /// /// With [`line!`] and [`file!`], these macros provide debugging information for @@ -662,8 +667,6 @@ pub fn option_env(stream: TokenStream) -> TokenStream { impls::eager_wrap(stream.into(), "option_env").into() } -/// ๐Ÿšง Not yet implemented! -/// /// [[eager!](macro.eager.html)] Expands to the file name in which it was invoked. /// /// With [`line!`] and [`column!`], these macros provide debugging information for @@ -727,8 +730,6 @@ pub fn include_str(stream: TokenStream) -> TokenStream { impls::eager_wrap(stream.into(), "include_str").into() } -/// ๐Ÿšง Not yet implemented! 
-/// /// [[eager!](macro.eager.html)] Expands to the line number on which it was invoked. /// /// With [`column!`] and [`file!`], these macros provide debugging information for diff --git a/eager2/src/rules.rs b/eager2/src/rules.rs index 6ad9c25..629ed93 100644 --- a/eager2/src/rules.rs +++ b/eager2/src/rules.rs @@ -9,7 +9,7 @@ pub fn eager_macro_rules(stream: TokenStream) -> TokenStream { Err(err) => return err.into_token_stream(), }; - #[cfg(feature = "trace_macros")] + #[cfg(feature = "trace-macros")] println!("eager_macro_rules output: {}", output); output @@ -22,8 +22,21 @@ pub fn eager_macro(attr: TokenStream, stream: TokenStream) -> TokenStream { Err(err) => return err.into_token_stream(), }; - #[cfg(feature = "trace_macros")] + #[cfg(feature = "trace-macros")] println!("eager_macro output: {}", output); output } + +pub fn eager_proc_macro(attr: TokenStream, stream: TokenStream) -> TokenStream { + init(); + let output = match eager2_core::rules::eager_proc_macro(attr, stream) { + Ok(output) => output, + Err(err) => return err.into_token_stream(), + }; + + #[cfg(feature = "trace-macros")] + println!("eager_proc_macro output: {}", output); + + output +} diff --git a/eager2/tests/decl_macro_v2.rs b/eager2/tests/decl_macro_v2.rs new file mode 100644 index 0000000..edf1540 --- /dev/null +++ b/eager2/tests/decl_macro_v2.rs @@ -0,0 +1,133 @@ +#![cfg_attr(rustchan = "nightly", feature(decl_macro))] + +#[cfg(all(rustchan = "nightly", test))] +mod does_it_compile { + use eager2::{eager, eager_macro}; + + #[eager_macro] + macro test_macro($($tokens:tt)*) { + $($tokens)* + } + + eager! 
{ + test_macro!( + fn return_input(input: usize) -> usize { + input + } + ) + } + + #[test] + fn it_does() { + assert_eq!(return_input(4), 4); + } +} + +#[cfg(test)] +mod visibility { + use eager2::eager; + + mod dummy { + use eager2::eager_macro; + + #[eager_macro] + pub(super) macro test_macro1($($tokens:tt)*) { + $($tokens)* + } + + #[eager_macro] + pub(crate) macro test_macro2($($tokens:tt)*) { + $($tokens)* + } + + #[eager_macro] + pub(in crate::visibility) macro test_macro3($($tokens:tt)*) { + $($tokens)* + } + } + + use dummy::{test_macro1, test_macro2, test_macro3}; + + eager! { + test_macro1!( + fn return_input1(input: usize) -> usize { + input + } + ) + + test_macro2!( + fn return_input2(input: usize) -> usize { + input + } + ) + + test_macro3!( + fn return_input3(input: usize) -> usize { + input + } + ) + } + + #[test] + fn it_works() { + assert_eq!(return_input1(1), 1); + assert_eq!(return_input2(2), 2); + assert_eq!(return_input3(3), 3); + } +} + +#[cfg(test)] +mod multiple_rules { + use eager2::{eager, eager_macro}; + + #[eager_macro] + macro make_array_impl + { + (0) => { }, + (1) => { 0 }, + (2) => { 0, make_array_impl!(1) }, + (3) => { 0, make_array_impl!(2) }, + (4) => { 0, make_array_impl!(3) }, + (5) => { 0, make_array_impl!(4) }, + (6) => { 0, make_array_impl!(5) }, + (7) => { 0, make_array_impl!(6) }, + (8) => { 0, make_array_impl!(7) }, + } + + #[eager_macro] + macro make_array($size:tt) { + [make_array_impl!($size)] + } + + #[eager_macro] + macro array_size + { + ([]) => { 0 }, + ([$first:expr $(, $others:expr)* $(,)?]) => { + 1 + array_size!([$($others,)*]) + } + } + + #[test] + fn array_size_normal() { + assert_eq!(array_size!([]), 0); + assert_eq!(array_size!([0]), 1); + assert_eq!(array_size!([0, 0]), 2); + assert_eq!(array_size!([0, 0, 0]), 3); + assert_eq!(array_size!([0, 0, 0, 0]), 4); + assert_eq!(array_size!([0, 0, 0, 0, 0]), 5); + } + + #[test] + fn array_size_eager() { + assert_eq!(eager! 
{ array_size!(make_array!(0)) }, 0); + assert_eq!(eager! { array_size!(make_array!(1)) }, 1); + assert_eq!(eager! { array_size!(make_array!(2)) }, 2); + assert_eq!(eager! { array_size!(make_array!(3)) }, 3); + assert_eq!(eager! { array_size!(make_array!(4)) }, 4); + assert_eq!(eager! { array_size!(make_array!(5)) }, 5); + assert_eq!(eager! { array_size!(make_array!(6)) }, 6); + assert_eq!(eager! { array_size!(make_array!(7)) }, 7); + assert_eq!(eager! { array_size!(make_array!(8)) }, 8); + } +} diff --git a/eager2/tests/proc_macro.rs b/eager2/tests/proc_macro.rs new file mode 100644 index 0000000..87ceec3 --- /dev/null +++ b/eager2/tests/proc_macro.rs @@ -0,0 +1,34 @@ +#[cfg(test)] +mod test { + use dummy::add; + use eager2::eager; + + #[test] + fn it_works() { + assert_eq!( + eager! { + 0 add!(1) + }, + 1 + ); + assert_eq!( + eager! { + 0 add!(1) add!(1) add!(1) + }, + 3 + ); + + assert_eq!( + eager! { + 0 add!(2) + }, + 2 + ); + assert_eq!( + eager! { + 0 add!(2) add!(2) add!(2) + }, + 6 + ); + } +}