✨ [macros] Add relevant macros for PageTop
The pagetop-macros crate provides a collection of macros that power development with PageTop.
parent 2f3424b1c6 · commit 87aac362d3
14 changed files with 1991 additions and 6 deletions
helpers/pagetop-macros/Cargo.toml · new file · 25 additions
@@ -0,0 +1,25 @@
[package]
name = "pagetop-macros"
version = "0.0.14"
edition = "2021"

description = """\
A collection of macros that power development with PageTop.\
"""
categories = ["development-tools::procedural-macro-helpers", "web-programming"]
keywords = ["pagetop", "macros", "proc-macros", "codegen"]

repository.workspace = true
homepage.workspace = true
license.workspace = true
authors.workspace = true

[lib]
proc-macro = true

[dependencies]
proc-macro2 = "1.0.92"
proc-macro-crate = "3.2.0"
proc-macro-error = "1.0.4"
quote = "1.0.37"
syn = { version = "2.0.90", features = ["full"] }

helpers/pagetop-macros/README.md · new file · 50 additions
@@ -0,0 +1,50 @@
<div align="center">

<h1>PageTop Macros</h1>

<p>A collection of macros that power development with <strong>PageTop</strong>.</p>

[License](#-license)
[Documentation](https://docs.rs/pagetop-macros)
[Crates.io](https://crates.io/crates/pagetop-macros)
[Downloads](https://crates.io/crates/pagetop-macros)

</div>


# 📦 About PageTop

[PageTop](https://docs.rs/pagetop) is a development environment whose conventions reclaim the
classic web through *server-side rendering* (SSR), HTML, CSS, and JS.


# 🚧 Warning

**PageTop** is a personal project under active development. Its API is currently unstable and
subject to frequent changes. It is not recommended for production use, at least until it reaches
version **0.1.0**.


# 🔖 Credits

This *crate* includes an adapted version of [maud-macros](https://crates.io/crates/maud_macros)
(version [0.25.0](https://github.com/lambda-fairy/maud/tree/v0.25.0/maud_macros)) created by
[Chris Wong](https://crates.io/users/lambda-fairy). It also integrates the
[SmartDefault](https://crates.io/crates/smart_default) crate (version 0.7.1), developed by
[Idan Arye](https://crates.io/users/idanarye), as `AutoDefault` to broaden the use of `Default`.

Both remove the need to reference `maud` or `smart_default` explicitly in the `Cargo.toml` of
each project.


# 📜 License

The code is available under a dual license:

* **MIT License**
  ([LICENSE-MIT](LICENSE-MIT) or https://opensource.org/licenses/MIT)

* **Apache License, Version 2.0**
  ([LICENSE-APACHE](LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0)

You may choose whichever license you prefer. This dual-licensing approach is the de facto standard
in the Rust ecosystem.

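To make the Credits section concrete, here is a minimal usage sketch of the two integrated macros. It assumes the parent `pagetop` crate re-exports them (for example through a prelude) together with a `Markup` return type; any name not present in this commit is illustrative only.

```rust
// Hypothetical downstream usage; assumes the parent `pagetop` crate re-exports
// these macros (e.g. via its prelude), as the Credits section describes.
use pagetop::prelude::*;

#[derive(AutoDefault)]
struct Card {
    // `#[default(...)]` comes from the adapted SmartDefault code.
    #[default("Untitled")]
    title: String,
    footer: Option<String>,
}

fn render(card: &Card) -> Markup {
    // `html!` is the adapted maud syntax; no direct `maud` dependency is needed.
    html! {
        div class="card" {
            h2 { (card.title) }
            @if let Some(footer) = &card.footer {
                footer { (footer) }
            }
        }
    }
}
```
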
helpers/pagetop-macros/src/lib.rs · new file · 198 additions
@@ -0,0 +1,198 @@
//! A collection of macros that power development with **PageTop**.

mod maud;
mod smart_default;

use proc_macro::TokenStream;
use proc_macro_error::proc_macro_error;
use quote::{quote, quote_spanned, ToTokens};
use syn::{parse_macro_input, parse_str, DeriveInput, ItemFn};

/// Attribute macro that pairs a `set_` method with its corresponding `with_` method to apply the
/// *builder* pattern.
///
/// # Panics
///
/// This function will *panic* if it finds no identifiers in the argument list.
///
/// # Examples
///
/// ```
/// #[fn_builder]
/// pub fn set_example(&mut self) -> &mut Self {
///     // implementation
/// }
/// ```
///
/// This will add the following method to the code:
///
/// ```
/// #[inline]
/// pub fn with_example(mut self) -> Self {
///     self.set_example();
///     self
/// }
/// ```
#[proc_macro_attribute]
pub fn fn_builder(_: TokenStream, item: TokenStream) -> TokenStream {
    let fn_set = parse_macro_input!(item as ItemFn);
    let fn_set_name = fn_set.sig.ident.to_string();

    if !fn_set_name.starts_with("set_") {
        let expanded = quote_spanned! {
            fn_set.sig.ident.span() =>
                compile_error!("expected a \"pub fn set_...() -> &mut Self\" method");
        };
        return expanded.into();
    }

    let fn_with_name = fn_set_name.replace("set_", "with_");
    let fn_with_generics = if fn_set.sig.generics.params.is_empty() {
        fn_with_name.clone()
    } else {
        let g = &fn_set.sig.generics;
        format!("{fn_with_name}{}", quote! { #g }.to_string())
    };

    let where_clause = fn_set
        .sig
        .generics
        .where_clause
        .as_ref()
        .map_or(String::new(), |where_clause| {
            format!("{} ", quote! { #where_clause }.to_string())
        });

    let args: Vec<String> = fn_set
        .sig
        .inputs
        .iter()
        .skip(1)
        .map(|arg| arg.to_token_stream().to_string())
        .collect();

    let params: Vec<String> = args
        .iter()
        .map(|arg| {
            arg.split_whitespace()
                .next()
                .unwrap()
                .trim_end_matches(':')
                .to_string()
        })
        .collect();

    #[rustfmt::skip]
    let fn_with = parse_str::<ItemFn>(format!(r##"
        pub fn {fn_with_generics}(mut self, {}) -> Self {where_clause} {{
            self.{fn_set_name}({});
            self
        }}
    "##, args.join(", "), params.join(", ")
    ).as_str()).unwrap();

    #[rustfmt::skip]
    let fn_set_doc = format!(r##"
        <p id="method.{fn_with_name}" style="margin-bottom: 12px;">Use
        <code class="code-header">pub fn <span class="fn" href="#method.{fn_with_name}">{fn_with_name}</span>(self, …) -> Self</code>
        for the <a href="#method.new">builder pattern</a>.
        </p>
    "##);

    let expanded = quote! {
        #[doc(hidden)]
        #fn_with
        #[inline]
        #[doc = #fn_set_doc]
        #fn_set
    };
    expanded.into()
}

#[proc_macro]
#[proc_macro_error]
pub fn html(input: TokenStream) -> TokenStream {
    maud::expand(input.into()).into()
}

#[proc_macro_derive(AutoDefault, attributes(default))]
pub fn derive_auto_default(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    match smart_default::body_impl::impl_my_derive(&input) {
        Ok(output) => output.into(),
        Err(error) => error.to_compile_error().into(),
    }
}

#[proc_macro_derive(ComponentClasses)]
pub fn derive_component_classes(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;

    #[rustfmt::skip]
    let fn_set_doc = format!(r##"
        <p id="method.with_classes">Use
        <code class="code-header"><span class="fn" href="#method.with_classes">with_classes</span>(self, …) -> Self</code>
        to apply the <a href="#method.new">builder pattern</a>.
        </p>
    "##);

    let expanded = quote! {
        impl ComponentClasses for #name {
            #[inline]
            #[doc = #fn_set_doc]
            fn set_classes(&mut self, op: ClassesOp, classes: impl Into<String>) -> &mut Self {
                self.classes.set_value(op, classes);
                self
            }

            fn classes(&self) -> &OptionClasses {
                &self.classes
            }
        }
    };

    TokenStream::from(expanded)
}

/// Defines an asynchronous `main` function as the `PageTop` entry point.
///
/// # Examples
///
/// ```
/// #[pagetop::main]
/// async fn main() {
///     async { println!("Hello world!"); }.await
/// }
/// ```
#[proc_macro_attribute]
pub fn main(_: TokenStream, item: TokenStream) -> TokenStream {
    let mut output: TokenStream = (quote! {
        #[::pagetop::service::rt::main(system = "::pagetop::service::rt::System")]
    })
    .into();

    output.extend(item);
    output
}

/// Defines asynchronous test functions to use with `PageTop`.
///
/// # Examples
///
/// ```
/// #[pagetop::test]
/// async fn test() {
///     assert_eq!(async { "Hello world" }.await, "Hello world");
/// }
/// ```
#[proc_macro_attribute]
pub fn test(_: TokenStream, item: TokenStream) -> TokenStream {
    let mut output: TokenStream = (quote! {
        #[::pagetop::service::rt::test(system = "::pagetop::service::rt::System")]
    })
    .into();

    output.extend(item);
    output
}

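As a companion to the `fn_builder` documentation above, this is a hand-written picture of the pairing the attribute is meant to generate for a setter that takes an argument; it mirrors the string-built `fn_with` above and is not actual macro output.

```rust
// Hand-written illustration of the `with_` companion that `#[fn_builder]` builds for a
// setter with an argument: same parameter list, consumes `self`, delegates to the setter.
struct Example {
    label: String,
}

impl Example {
    // The original method, as a user would write it under `#[fn_builder]`.
    pub fn set_label(&mut self, label: impl Into<String>) -> &mut Self {
        self.label = label.into();
        self
    }

    // The generated companion (written by hand here for illustration).
    pub fn with_label(mut self, label: impl Into<String>) -> Self {
        self.set_label(label);
        self
    }
}

fn main() {
    let example = Example { label: String::new() }.with_label("Hello");
    assert_eq!(example.label, "Hello");
}
```
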
helpers/pagetop-macros/src/maud.rs · new file · 39 additions
@@ -0,0 +1,39 @@
// #![doc(html_root_url = "https://docs.rs/maud_macros/0.25.0")]
// TokenStream values are reference counted, and the mental overhead of tracking
// lifetimes outweighs the marginal gains from explicit borrowing
// #![allow(clippy::needless_pass_by_value)]

mod ast;
mod escape;
mod generate;
mod parse;

use proc_macro2::{Ident, Span, TokenStream, TokenTree};
use proc_macro_crate::{crate_name, FoundCrate};
use quote::quote;

pub fn expand(input: TokenStream) -> TokenStream {
    let output_ident = TokenTree::Ident(Ident::new("__maud_output", Span::mixed_site()));
    // Heuristic: the size of the resulting markup tends to correlate with the
    // code size of the template itself
    let size_hint = input.to_string().len();
    let markups = parse::parse(input);
    let stmts = generate::generate(markups, output_ident.clone());

    let found_crate = crate_name("pagetop").expect("pagetop is present in `Cargo.toml`");
    let pre_escaped = match found_crate {
        FoundCrate::Itself => quote!(
            crate::html::PreEscaped(#output_ident)
        ),
        _ => quote!(
            pagetop::html::PreEscaped(#output_ident)
        ),
    };

    quote!({
        extern crate alloc;
        let mut #output_ident = alloc::string::String::with_capacity(#size_hint);
        #stmts
        #pre_escaped
    })
}

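A rough, hand-written approximation of the block `expand` returns for a tiny template, showing the role of the size hint, the accumulated statements, and the `PreEscaped` wrapper. It is simplified: the real expansion resolves the crate path with `proc-macro-crate` and uses the hygienic `__maud_output` identifier and `pagetop::html::PreEscaped`.

```rust
// Rough approximation of what `html! { p { "Hi" } }` expands to.
// `PreEscaped` here is a stand-in for `pagetop::html::PreEscaped`.
struct PreEscaped<T>(T);

fn expanded_example() -> PreEscaped<String> {
    {
        // `size_hint` comes from the textual length of the template.
        let mut __maud_output = String::with_capacity(16);
        // Statements emitted by `generate::generate` for the template body.
        __maud_output.push_str("<p>Hi</p>");
        PreEscaped(__maud_output)
    }
}

fn main() {
    assert_eq!(expanded_example().0, "<p>Hi</p>");
}
```
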
helpers/pagetop-macros/src/maud/ast.rs · new file · 221 additions
@@ -0,0 +1,221 @@
use proc_macro2::{TokenStream, TokenTree};
use proc_macro_error::SpanRange;

#[derive(Debug)]
pub enum Markup {
    /// Used as a placeholder value on parse error.
    ParseError {
        span: SpanRange,
    },
    Block(Block),
    Literal {
        content: String,
        span: SpanRange,
    },
    Symbol {
        symbol: TokenStream,
    },
    Splice {
        expr: TokenStream,
        outer_span: SpanRange,
    },
    Element {
        name: TokenStream,
        attrs: Vec<Attr>,
        body: ElementBody,
    },
    Let {
        at_span: SpanRange,
        tokens: TokenStream,
    },
    Special {
        segments: Vec<Special>,
    },
    Match {
        at_span: SpanRange,
        head: TokenStream,
        arms: Vec<MatchArm>,
        arms_span: SpanRange,
    },
}

impl Markup {
    pub fn span(&self) -> SpanRange {
        match *self {
            Markup::ParseError { span } => span,
            Markup::Block(ref block) => block.span(),
            Markup::Literal { span, .. } => span,
            Markup::Symbol { ref symbol } => span_tokens(symbol.clone()),
            Markup::Splice { outer_span, .. } => outer_span,
            Markup::Element {
                ref name, ref body, ..
            } => {
                let name_span = span_tokens(name.clone());
                name_span.join_range(body.span())
            }
            Markup::Let {
                at_span,
                ref tokens,
            } => at_span.join_range(span_tokens(tokens.clone())),
            Markup::Special { ref segments } => join_ranges(segments.iter().map(Special::span)),
            Markup::Match {
                at_span, arms_span, ..
            } => at_span.join_range(arms_span),
        }
    }
}

#[derive(Debug)]
pub enum Attr {
    Class {
        dot_span: SpanRange,
        name: Markup,
        toggler: Option<Toggler>,
    },
    Id {
        hash_span: SpanRange,
        name: Markup,
    },
    Named {
        named_attr: NamedAttr,
    },
}

impl Attr {
    pub fn span(&self) -> SpanRange {
        match *self {
            Attr::Class {
                dot_span,
                ref name,
                ref toggler,
            } => {
                let name_span = name.span();
                let dot_name_span = dot_span.join_range(name_span);
                if let Some(toggler) = toggler {
                    dot_name_span.join_range(toggler.cond_span)
                } else {
                    dot_name_span
                }
            }
            Attr::Id {
                hash_span,
                ref name,
            } => {
                let name_span = name.span();
                hash_span.join_range(name_span)
            }
            Attr::Named { ref named_attr } => named_attr.span(),
        }
    }
}

#[derive(Debug)]
pub enum ElementBody {
    Void { semi_span: SpanRange },
    Block { block: Block },
}

impl ElementBody {
    pub fn span(&self) -> SpanRange {
        match *self {
            ElementBody::Void { semi_span } => semi_span,
            ElementBody::Block { ref block } => block.span(),
        }
    }
}

#[derive(Debug)]
pub struct Block {
    pub markups: Vec<Markup>,
    pub outer_span: SpanRange,
}

impl Block {
    pub fn span(&self) -> SpanRange {
        self.outer_span
    }
}

#[derive(Debug)]
pub struct Special {
    pub at_span: SpanRange,
    pub head: TokenStream,
    pub body: Block,
}

impl Special {
    pub fn span(&self) -> SpanRange {
        let body_span = self.body.span();
        self.at_span.join_range(body_span)
    }
}

#[derive(Debug)]
pub struct NamedAttr {
    pub name: TokenStream,
    pub attr_type: AttrType,
}

impl NamedAttr {
    fn span(&self) -> SpanRange {
        let name_span = span_tokens(self.name.clone());
        if let Some(attr_type_span) = self.attr_type.span() {
            name_span.join_range(attr_type_span)
        } else {
            name_span
        }
    }
}

#[derive(Debug)]
pub enum AttrType {
    Normal { value: Markup },
    Optional { toggler: Toggler },
    Empty { toggler: Option<Toggler> },
}

impl AttrType {
    fn span(&self) -> Option<SpanRange> {
        match *self {
            AttrType::Normal { ref value } => Some(value.span()),
            AttrType::Optional { ref toggler } => Some(toggler.span()),
            AttrType::Empty { ref toggler } => toggler.as_ref().map(Toggler::span),
        }
    }
}

#[derive(Debug)]
pub struct Toggler {
    pub cond: TokenStream,
    pub cond_span: SpanRange,
}

impl Toggler {
    fn span(&self) -> SpanRange {
        self.cond_span
    }
}

#[derive(Debug)]
pub struct MatchArm {
    pub head: TokenStream,
    pub body: Block,
}

pub fn span_tokens<I: IntoIterator<Item = TokenTree>>(tokens: I) -> SpanRange {
    join_ranges(tokens.into_iter().map(|s| SpanRange::single_span(s.span())))
}

pub fn join_ranges<I: IntoIterator<Item = SpanRange>>(ranges: I) -> SpanRange {
    let mut iter = ranges.into_iter();
    let first = match iter.next() {
        Some(span) => span,
        None => return SpanRange::call_site(),
    };
    let last = iter.last().unwrap_or(first);
    first.join_range(last)
}

pub fn name_to_string(name: TokenStream) -> String {
    name.into_iter().map(|token| token.to_string()).collect()
}

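To show which of these variants a template touches, here is a deliberately simplified, span-free mirror of the AST for a one-line template; the real `Markup` nodes carry `TokenStream`s and `SpanRange`s rather than plain strings.

```rust
// Span-free, simplified mirror of the AST above, only to illustrate which variants a
// small template maps to; not the real types used by the parser and generator.
#[derive(Debug)]
enum MiniMarkup {
    // "Hello, " — a string literal in the template.
    Literal(String),
    // (name) — an expression spliced in at runtime.
    Splice(String),
    // An element with class shorthands and a body of nested markup.
    Element {
        name: String,
        classes: Vec<String>,
        body: Vec<MiniMarkup>,
    },
}

fn main() {
    // Roughly what `p.greeting { "Hello, " (name) }` parses into.
    let tree = MiniMarkup::Element {
        name: "p".into(),
        classes: vec!["greeting".into()],
        body: vec![
            MiniMarkup::Literal("Hello, ".into()),
            MiniMarkup::Splice("name".into()),
        ],
    };
    println!("{tree:#?}");
}
```
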
helpers/pagetop-macros/src/maud/escape.rs · new file · 34 additions
@@ -0,0 +1,34 @@
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// !!!!!!!! PLEASE KEEP THIS IN SYNC WITH `maud/src/escape.rs` !!!!!!!!!
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

extern crate alloc;

use alloc::string::String;

pub fn escape_to_string(input: &str, output: &mut String) {
    for b in input.bytes() {
        match b {
            b'&' => output.push_str("&amp;"),
            b'<' => output.push_str("&lt;"),
            b'>' => output.push_str("&gt;"),
            b'"' => output.push_str("&quot;"),
            _ => unsafe { output.as_mut_vec().push(b) },
        }
    }
}

#[cfg(test)]
mod test {
    extern crate alloc;

    use super::escape_to_string;
    use alloc::string::String;

    #[test]
    fn it_works() {
        let mut s = String::new();
        escape_to_string("<script>launchMissiles()</script>", &mut s);
        assert_eq!(s, "&lt;script&gt;launchMissiles()&lt;/script&gt;");
    }
}

helpers/pagetop-macros/src/maud/generate.rs · new file · 308 additions
@@ -0,0 +1,308 @@
use proc_macro2::{Delimiter, Group, Ident, Literal, Span, TokenStream, TokenTree};
use proc_macro_error::SpanRange;
use quote::quote;

use crate::maud::{ast::*, escape};

use proc_macro_crate::{crate_name, FoundCrate};

pub fn generate(markups: Vec<Markup>, output_ident: TokenTree) -> TokenStream {
    let mut build = Builder::new(output_ident.clone());
    Generator::new(output_ident).markups(markups, &mut build);
    build.finish()
}

struct Generator {
    output_ident: TokenTree,
}

impl Generator {
    fn new(output_ident: TokenTree) -> Generator {
        Generator { output_ident }
    }

    fn builder(&self) -> Builder {
        Builder::new(self.output_ident.clone())
    }

    fn markups(&self, markups: Vec<Markup>, build: &mut Builder) {
        for markup in markups {
            self.markup(markup, build);
        }
    }

    fn markup(&self, markup: Markup, build: &mut Builder) {
        match markup {
            Markup::ParseError { .. } => {}
            Markup::Block(Block {
                markups,
                outer_span,
            }) => {
                if markups
                    .iter()
                    .any(|markup| matches!(*markup, Markup::Let { .. }))
                {
                    self.block(
                        Block {
                            markups,
                            outer_span,
                        },
                        build,
                    );
                } else {
                    self.markups(markups, build);
                }
            }
            Markup::Literal { content, .. } => build.push_escaped(&content),
            Markup::Symbol { symbol } => self.name(symbol, build),
            Markup::Splice { expr, .. } => self.splice(expr, build),
            Markup::Element { name, attrs, body } => self.element(name, attrs, body, build),
            Markup::Let { tokens, .. } => build.push_tokens(tokens),
            Markup::Special { segments } => {
                for Special { head, body, .. } in segments {
                    build.push_tokens(head);
                    self.block(body, build);
                }
            }
            Markup::Match {
                head,
                arms,
                arms_span,
                ..
            } => {
                let body = {
                    let mut build = self.builder();
                    for MatchArm { head, body } in arms {
                        build.push_tokens(head);
                        self.block(body, &mut build);
                    }
                    build.finish()
                };
                let mut body = TokenTree::Group(Group::new(Delimiter::Brace, body));
                body.set_span(arms_span.collapse());
                build.push_tokens(quote!(#head #body));
            }
        }
    }

    fn block(
        &self,
        Block {
            markups,
            outer_span,
        }: Block,
        build: &mut Builder,
    ) {
        let block = {
            let mut build = self.builder();
            self.markups(markups, &mut build);
            build.finish()
        };
        let mut block = TokenTree::Group(Group::new(Delimiter::Brace, block));
        block.set_span(outer_span.collapse());
        build.push_tokens(TokenStream::from(block));
    }

    fn splice(&self, expr: TokenStream, build: &mut Builder) {
        let output_ident = self.output_ident.clone();

        let found_crate = crate_name("pagetop").expect("pagetop is present in `Cargo.toml`");
        build.push_tokens(match found_crate {
            FoundCrate::Itself => quote!(
                crate::html::html_private::render_to!(&#expr, &mut #output_ident);
            ),
            _ => quote!(
                pagetop::html::html_private::render_to!(&#expr, &mut #output_ident);
            ),
        });
    }

    fn element(&self, name: TokenStream, attrs: Vec<Attr>, body: ElementBody, build: &mut Builder) {
        build.push_str("<");
        self.name(name.clone(), build);
        self.attrs(attrs, build);
        build.push_str(">");
        if let ElementBody::Block { block } = body {
            self.markups(block.markups, build);
            build.push_str("</");
            self.name(name, build);
            build.push_str(">");
        }
    }

    fn name(&self, name: TokenStream, build: &mut Builder) {
        build.push_escaped(&name_to_string(name));
    }

    fn attrs(&self, attrs: Vec<Attr>, build: &mut Builder) {
        for NamedAttr { name, attr_type } in desugar_attrs(attrs) {
            match attr_type {
                AttrType::Normal { value } => {
                    build.push_str(" ");
                    self.name(name, build);
                    build.push_str("=\"");
                    self.markup(value, build);
                    build.push_str("\"");
                }
                AttrType::Optional {
                    toggler: Toggler { cond, .. },
                } => {
                    let inner_value = quote!(inner_value);
                    let body = {
                        let mut build = self.builder();
                        build.push_str(" ");
                        self.name(name, &mut build);
                        build.push_str("=\"");
                        self.splice(inner_value.clone(), &mut build);
                        build.push_str("\"");
                        build.finish()
                    };
                    build.push_tokens(quote!(if let Some(#inner_value) = (#cond) { #body }));
                }
                AttrType::Empty { toggler: None } => {
                    build.push_str(" ");
                    self.name(name, build);
                }
                AttrType::Empty {
                    toggler: Some(Toggler { cond, .. }),
                } => {
                    let body = {
                        let mut build = self.builder();
                        build.push_str(" ");
                        self.name(name, &mut build);
                        build.finish()
                    };
                    build.push_tokens(quote!(if (#cond) { #body }));
                }
            }
        }
    }
}

////////////////////////////////////////////////////////

fn desugar_attrs(attrs: Vec<Attr>) -> Vec<NamedAttr> {
    let mut classes_static = vec![];
    let mut classes_toggled = vec![];
    let mut ids = vec![];
    let mut named_attrs = vec![];
    for attr in attrs {
        match attr {
            Attr::Class {
                name,
                toggler: Some(toggler),
                ..
            } => classes_toggled.push((name, toggler)),
            Attr::Class {
                name,
                toggler: None,
                ..
            } => classes_static.push(name),
            Attr::Id { name, .. } => ids.push(name),
            Attr::Named { named_attr } => named_attrs.push(named_attr),
        }
    }
    let classes = desugar_classes_or_ids("class", classes_static, classes_toggled);
    let ids = desugar_classes_or_ids("id", ids, vec![]);
    classes.into_iter().chain(ids).chain(named_attrs).collect()
}

fn desugar_classes_or_ids(
    attr_name: &'static str,
    values_static: Vec<Markup>,
    values_toggled: Vec<(Markup, Toggler)>,
) -> Option<NamedAttr> {
    if values_static.is_empty() && values_toggled.is_empty() {
        return None;
    }
    let mut markups = Vec::new();
    let mut leading_space = false;
    for name in values_static {
        markups.extend(prepend_leading_space(name, &mut leading_space));
    }
    for (name, Toggler { cond, cond_span }) in values_toggled {
        let body = Block {
            markups: prepend_leading_space(name, &mut leading_space),
            // TODO: is this correct?
            outer_span: cond_span,
        };
        markups.push(Markup::Special {
            segments: vec![Special {
                at_span: SpanRange::call_site(),
                head: quote!(if (#cond)),
                body,
            }],
        });
    }
    Some(NamedAttr {
        name: TokenStream::from(TokenTree::Ident(Ident::new(attr_name, Span::call_site()))),
        attr_type: AttrType::Normal {
            value: Markup::Block(Block {
                markups,
                outer_span: SpanRange::call_site(),
            }),
        },
    })
}

fn prepend_leading_space(name: Markup, leading_space: &mut bool) -> Vec<Markup> {
    let mut markups = Vec::new();
    if *leading_space {
        markups.push(Markup::Literal {
            content: " ".to_owned(),
            span: name.span(),
        });
    }
    *leading_space = true;
    markups.push(name);
    markups
}

////////////////////////////////////////////////////////

struct Builder {
    output_ident: TokenTree,
    tokens: Vec<TokenTree>,
    tail: String,
}

impl Builder {
    fn new(output_ident: TokenTree) -> Builder {
        Builder {
            output_ident,
            tokens: Vec::new(),
            tail: String::new(),
        }
    }

    fn push_str(&mut self, string: &str) {
        self.tail.push_str(string);
    }

    fn push_escaped(&mut self, string: &str) {
        escape::escape_to_string(string, &mut self.tail);
    }

    fn push_tokens(&mut self, tokens: TokenStream) {
        self.cut();
        self.tokens.extend(tokens);
    }

    fn cut(&mut self) {
        if self.tail.is_empty() {
            return;
        }
        let push_str_expr = {
            let output_ident = self.output_ident.clone();
            let string = TokenTree::Literal(Literal::string(&self.tail));
            quote!(#output_ident.push_str(#string);)
        };
        self.tail.clear();
        self.tokens.extend(push_str_expr);
    }

    fn finish(mut self) -> TokenStream {
        self.cut();
        self.tokens.into_iter().collect()
    }
}

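A hand-written sketch of the runtime code the generator emits for a template with a toggled class, such as `div.card.hidden[collapsed] { "Card body" }`: `Builder` batches static fragments into `push_str` calls, and `desugar_attrs` turns the toggler into an `if` around its class fragment. Simplified; the real output writes into the hygienic `__maud_output` string and escapes literals.

```rust
// Approximation of the generated rendering code for
// `div.card.hidden[collapsed] { "Card body" }`.
fn render_card(collapsed: bool) -> String {
    let mut output = String::with_capacity(32);
    // Static prefix batched into a single push_str by Builder::cut().
    output.push_str("<div class=\"card");
    // Toggled class: emitted as control flow between the batched fragments.
    if collapsed {
        output.push_str(" hidden");
    }
    // Closing quote, body, and closing tag are batched again.
    output.push_str("\">Card body</div>");
    output
}

fn main() {
    assert_eq!(render_card(false), "<div class=\"card\">Card body</div>");
    assert_eq!(render_card(true), "<div class=\"card hidden\">Card body</div>");
}
```
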
helpers/pagetop-macros/src/maud/parse.rs · new file · 752 additions
@@ -0,0 +1,752 @@
use proc_macro2::{Delimiter, Ident, Literal, Spacing, Span, TokenStream, TokenTree};
use proc_macro_error::{abort, abort_call_site, emit_error, SpanRange};
use std::collections::HashMap;

use syn::Lit;

use crate::maud::ast;

pub fn parse(input: TokenStream) -> Vec<ast::Markup> {
    Parser::new(input).markups()
}

#[derive(Clone)]
struct Parser {
    /// If we're inside an attribute, then this contains the attribute name.
    current_attr: Option<String>,
    input: <TokenStream as IntoIterator>::IntoIter,
}

impl Iterator for Parser {
    type Item = TokenTree;

    fn next(&mut self) -> Option<TokenTree> {
        self.input.next()
    }
}

impl Parser {
    fn new(input: TokenStream) -> Parser {
        Parser {
            current_attr: None,
            input: input.into_iter(),
        }
    }

    fn with_input(&self, input: TokenStream) -> Parser {
        Parser {
            current_attr: self.current_attr.clone(),
            input: input.into_iter(),
        }
    }

    /// Returns the next token in the stream without consuming it.
    fn peek(&mut self) -> Option<TokenTree> {
        self.clone().next()
    }

    /// Returns the next two tokens in the stream without consuming them.
    fn peek2(&mut self) -> Option<(TokenTree, Option<TokenTree>)> {
        let mut clone = self.clone();
        clone.next().map(|first| (first, clone.next()))
    }

    /// Advances the cursor by one step.
    fn advance(&mut self) {
        self.next();
    }

    /// Advances the cursor by two steps.
    fn advance2(&mut self) {
        self.next();
        self.next();
    }

    /// Parses multiple blocks of markup.
    fn markups(&mut self) -> Vec<ast::Markup> {
        let mut result = Vec::new();
        loop {
            match self.peek2() {
                None => break,
                Some((TokenTree::Punct(ref punct), _)) if punct.as_char() == ';' => self.advance(),
                Some((TokenTree::Punct(ref punct), Some(TokenTree::Ident(ref ident))))
                    if punct.as_char() == '@' && *ident == "let" =>
                {
                    self.advance2();
                    let keyword = TokenTree::Ident(ident.clone());
                    result.push(self.let_expr(punct.span(), keyword));
                }
                _ => result.push(self.markup()),
            }
        }
        result
    }

    /// Parses a single block of markup.
    fn markup(&mut self) -> ast::Markup {
        let token = match self.peek() {
            Some(token) => token,
            None => {
                abort_call_site!("unexpected end of input");
            }
        };
        let markup = match token {
            // Literal
            TokenTree::Literal(literal) => {
                self.advance();
                self.literal(literal)
            }
            // Special form
            TokenTree::Punct(ref punct) if punct.as_char() == '@' => {
                self.advance();
                let at_span = punct.span();
                match self.next() {
                    Some(TokenTree::Ident(ident)) => {
                        let keyword = TokenTree::Ident(ident.clone());
                        match ident.to_string().as_str() {
                            "if" => {
                                let mut segments = Vec::new();
                                self.if_expr(at_span, vec![keyword], &mut segments);
                                ast::Markup::Special { segments }
                            }
                            "while" => self.while_expr(at_span, keyword),
                            "for" => self.for_expr(at_span, keyword),
                            "match" => self.match_expr(at_span, keyword),
                            "let" => {
                                let span = SpanRange {
                                    first: at_span,
                                    last: ident.span(),
                                };
                                abort!(span, "`@let` only works inside a block");
                            }
                            other => {
                                let span = SpanRange {
                                    first: at_span,
                                    last: ident.span(),
                                };
                                abort!(span, "unknown keyword `@{}`", other);
                            }
                        }
                    }
                    _ => {
                        abort!(at_span, "expected keyword after `@`");
                    }
                }
            }
            // Element
            TokenTree::Ident(ident) => {
                let ident_string = ident.to_string();
                match ident_string.as_str() {
                    "if" | "while" | "for" | "match" | "let" => {
                        abort!(
                            ident,
                            "found keyword `{}`", ident_string;
                            help = "should this be a `@{}`?", ident_string
                        );
                    }
                    "true" | "false" => {
                        if let Some(attr_name) = &self.current_attr {
                            emit_error!(
                                ident,
                                r#"attribute value must be a string"#;
                                help = "to declare an empty attribute, omit the equals sign: `{}`",
                                attr_name;
                                help = "to toggle the attribute, use square brackets: `{}[some_boolean_flag]`",
                                attr_name;
                            );
                            return ast::Markup::ParseError {
                                span: SpanRange::single_span(ident.span()),
                            };
                        }
                    }
                    _ => {}
                }

                // `.try_namespaced_name()` should never fail as we've
                // already seen an `Ident`
                let name = self.try_namespaced_name().expect("identifier");
                self.element(name)
            }
            // Div element shorthand
            TokenTree::Punct(ref punct) if punct.as_char() == '.' || punct.as_char() == '#' => {
                let name = TokenTree::Ident(Ident::new("div", punct.span()));
                self.element(name.into())
            }
            // Splice
            TokenTree::Group(ref group) if group.delimiter() == Delimiter::Parenthesis => {
                self.advance();
                ast::Markup::Splice {
                    expr: group.stream(),
                    outer_span: SpanRange::single_span(group.span()),
                }
            }
            // Block
            TokenTree::Group(ref group) if group.delimiter() == Delimiter::Brace => {
                self.advance();
                ast::Markup::Block(self.block(group.stream(), SpanRange::single_span(group.span())))
            }
            // ???
            token => {
                abort!(token, "invalid syntax");
            }
        };
        markup
    }

    /// Parses a literal string.
    fn literal(&mut self, literal: Literal) -> ast::Markup {
        match Lit::new(literal.clone()) {
            Lit::Str(lit_str) => {
                return ast::Markup::Literal {
                    content: lit_str.value(),
                    span: SpanRange::single_span(literal.span()),
                }
            }
            // Boolean literals are idents, so `Lit::Bool` is handled in
            // `markup`, not here.
            Lit::Int(..) | Lit::Float(..) => {
                emit_error!(literal, r#"literal must be double-quoted: `"{}"`"#, literal);
            }
            Lit::Char(lit_char) => {
                emit_error!(
                    literal,
                    r#"literal must be double-quoted: `"{}"`"#,
                    lit_char.value(),
                );
            }
            _ => {
                emit_error!(literal, "expected string");
            }
        }
        ast::Markup::ParseError {
            span: SpanRange::single_span(literal.span()),
        }
    }

    /// Parses an `@if` expression.
    ///
    /// The leading `@if` should already be consumed.
    fn if_expr(&mut self, at_span: Span, prefix: Vec<TokenTree>, segments: &mut Vec<ast::Special>) {
        let mut head = prefix;
        let body = loop {
            match self.next() {
                Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => {
                    break self.block(block.stream(), SpanRange::single_span(block.span()));
                }
                Some(token) => head.push(token),
                None => {
                    let mut span = ast::span_tokens(head);
                    span.first = at_span;
                    abort!(span, "expected body for this `@if`");
                }
            }
        };
        segments.push(ast::Special {
            at_span: SpanRange::single_span(at_span),
            head: head.into_iter().collect(),
            body,
        });
        self.else_if_expr(segments)
    }

    /// Parses an optional `@else if` or `@else`.
    ///
    /// The leading `@else if` or `@else` should *not* already be consumed.
    fn else_if_expr(&mut self, segments: &mut Vec<ast::Special>) {
        match self.peek2() {
            Some((TokenTree::Punct(ref punct), Some(TokenTree::Ident(ref else_keyword))))
                if punct.as_char() == '@' && *else_keyword == "else" =>
            {
                self.advance2();
                let at_span = punct.span();
                let else_keyword = TokenTree::Ident(else_keyword.clone());
                match self.peek() {
                    // `@else if`
                    Some(TokenTree::Ident(ref if_keyword)) if *if_keyword == "if" => {
                        self.advance();
                        let if_keyword = TokenTree::Ident(if_keyword.clone());
                        self.if_expr(at_span, vec![else_keyword, if_keyword], segments)
                    }
                    // Just an `@else`
                    _ => match self.next() {
                        Some(TokenTree::Group(ref group))
                            if group.delimiter() == Delimiter::Brace =>
                        {
                            let body =
                                self.block(group.stream(), SpanRange::single_span(group.span()));
                            segments.push(ast::Special {
                                at_span: SpanRange::single_span(at_span),
                                head: vec![else_keyword].into_iter().collect(),
                                body,
                            });
                        }
                        _ => {
                            let span = SpanRange {
                                first: at_span,
                                last: else_keyword.span(),
                            };
                            abort!(span, "expected body for this `@else`");
                        }
                    },
                }
            }
            // We didn't find an `@else`; stop
            _ => {}
        }
    }

    /// Parses an `@while` expression.
    ///
    /// The leading `@while` should already be consumed.
    fn while_expr(&mut self, at_span: Span, keyword: TokenTree) -> ast::Markup {
        let keyword_span = keyword.span();
        let mut head = vec![keyword];
        let body = loop {
            match self.next() {
                Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => {
                    break self.block(block.stream(), SpanRange::single_span(block.span()));
                }
                Some(token) => head.push(token),
                None => {
                    let span = SpanRange {
                        first: at_span,
                        last: keyword_span,
                    };
                    abort!(span, "expected body for this `@while`");
                }
            }
        };
        ast::Markup::Special {
            segments: vec![ast::Special {
                at_span: SpanRange::single_span(at_span),
                head: head.into_iter().collect(),
                body,
            }],
        }
    }

    /// Parses a `@for` expression.
    ///
    /// The leading `@for` should already be consumed.
    fn for_expr(&mut self, at_span: Span, keyword: TokenTree) -> ast::Markup {
        let keyword_span = keyword.span();
        let mut head = vec![keyword];
        loop {
            match self.next() {
                Some(TokenTree::Ident(ref in_keyword)) if *in_keyword == "in" => {
                    head.push(TokenTree::Ident(in_keyword.clone()));
                    break;
                }
                Some(token) => head.push(token),
                None => {
                    let span = SpanRange {
                        first: at_span,
                        last: keyword_span,
                    };
                    abort!(span, "missing `in` in `@for` loop");
                }
            }
        }
        let body = loop {
            match self.next() {
                Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => {
                    break self.block(block.stream(), SpanRange::single_span(block.span()));
                }
                Some(token) => head.push(token),
                None => {
                    let span = SpanRange {
                        first: at_span,
                        last: keyword_span,
                    };
                    abort!(span, "expected body for this `@for`");
                }
            }
        };
        ast::Markup::Special {
            segments: vec![ast::Special {
                at_span: SpanRange::single_span(at_span),
                head: head.into_iter().collect(),
                body,
            }],
        }
    }

    /// Parses a `@match` expression.
    ///
    /// The leading `@match` should already be consumed.
    fn match_expr(&mut self, at_span: Span, keyword: TokenTree) -> ast::Markup {
        let keyword_span = keyword.span();
        let mut head = vec![keyword];
        let (arms, arms_span) = loop {
            match self.next() {
                Some(TokenTree::Group(ref body)) if body.delimiter() == Delimiter::Brace => {
                    let span = SpanRange::single_span(body.span());
                    break (self.with_input(body.stream()).match_arms(), span);
                }
                Some(token) => head.push(token),
                None => {
                    let span = SpanRange {
                        first: at_span,
                        last: keyword_span,
                    };
                    abort!(span, "expected body for this `@match`");
                }
            }
        };
        ast::Markup::Match {
            at_span: SpanRange::single_span(at_span),
            head: head.into_iter().collect(),
            arms,
            arms_span,
        }
    }

    fn match_arms(&mut self) -> Vec<ast::MatchArm> {
        let mut arms = Vec::new();
        while let Some(arm) = self.match_arm() {
            arms.push(arm);
        }
        arms
    }

    fn match_arm(&mut self) -> Option<ast::MatchArm> {
        let mut head = Vec::new();
        loop {
            match self.peek2() {
                Some((TokenTree::Punct(ref eq), Some(TokenTree::Punct(ref gt))))
                    if eq.as_char() == '='
                        && gt.as_char() == '>'
                        && eq.spacing() == Spacing::Joint =>
                {
                    self.advance2();
                    head.push(TokenTree::Punct(eq.clone()));
                    head.push(TokenTree::Punct(gt.clone()));
                    break;
                }
                Some((token, _)) => {
                    self.advance();
                    head.push(token);
                }
                None => {
                    if head.is_empty() {
                        return None;
                    } else {
                        let head_span = ast::span_tokens(head);
                        abort!(head_span, "unexpected end of @match pattern");
                    }
                }
            }
        }
        let body = match self.next() {
            // $pat => { $stmts }
            Some(TokenTree::Group(ref body)) if body.delimiter() == Delimiter::Brace => {
                let body = self.block(body.stream(), SpanRange::single_span(body.span()));
                // Trailing commas are optional if the match arm is a braced block
                if let Some(TokenTree::Punct(ref punct)) = self.peek() {
                    if punct.as_char() == ',' {
                        self.advance();
                    }
                }
                body
            }
            // $pat => $expr
            Some(first_token) => {
                let mut span = SpanRange::single_span(first_token.span());
                let mut body = vec![first_token];
                loop {
                    match self.next() {
                        Some(TokenTree::Punct(ref punct)) if punct.as_char() == ',' => break,
                        Some(token) => {
                            span.last = token.span();
                            body.push(token);
                        }
                        None => break,
                    }
                }
                self.block(body.into_iter().collect(), span)
            }
            None => {
                let span = ast::span_tokens(head);
                abort!(span, "unexpected end of @match arm");
            }
        };
        Some(ast::MatchArm {
            head: head.into_iter().collect(),
            body,
        })
    }

    /// Parses a `@let` expression.
    ///
    /// The leading `@let` should already be consumed.
    fn let_expr(&mut self, at_span: Span, keyword: TokenTree) -> ast::Markup {
        let mut tokens = vec![keyword];
        loop {
            match self.next() {
                Some(token) => match token {
                    TokenTree::Punct(ref punct) if punct.as_char() == '=' => {
                        tokens.push(token.clone());
                        break;
                    }
                    _ => tokens.push(token),
                },
                None => {
                    let mut span = ast::span_tokens(tokens);
                    span.first = at_span;
                    abort!(span, "unexpected end of `@let` expression");
                }
            }
        }
        loop {
            match self.next() {
                Some(token) => match token {
                    TokenTree::Punct(ref punct) if punct.as_char() == ';' => {
                        tokens.push(token.clone());
                        break;
                    }
                    _ => tokens.push(token),
                },
                None => {
                    let mut span = ast::span_tokens(tokens);
                    span.first = at_span;
                    abort!(
                        span,
                        "unexpected end of `@let` expression";
                        help = "are you missing a semicolon?"
                    );
                }
            }
        }
        ast::Markup::Let {
            at_span: SpanRange::single_span(at_span),
            tokens: tokens.into_iter().collect(),
        }
    }

    /// Parses an element node.
    ///
    /// The element name should already be consumed.
    fn element(&mut self, name: TokenStream) -> ast::Markup {
        if self.current_attr.is_some() {
            let span = ast::span_tokens(name);
            abort!(span, "unexpected element");
        }
        let attrs = self.attrs();
        let body = match self.peek() {
            Some(TokenTree::Punct(ref punct))
                if punct.as_char() == ';' || punct.as_char() == '/' =>
            {
                // Void element
                self.advance();
                if punct.as_char() == '/' {
                    emit_error!(
                        punct,
                        "void elements must use `;`, not `/`";
                        help = "change this to `;`";
                        help = "see https://github.com/lambda-fairy/maud/pull/315 for details";
                    );
                }
                ast::ElementBody::Void {
                    semi_span: SpanRange::single_span(punct.span()),
                }
            }
            Some(_) => match self.markup() {
                ast::Markup::Block(block) => ast::ElementBody::Block { block },
                markup => {
                    let markup_span = markup.span();
                    abort!(
                        markup_span,
                        "element body must be wrapped in braces";
                        help = "see https://github.com/lambda-fairy/maud/pull/137 for details"
                    );
                }
            },
            None => abort_call_site!("expected `;`, found end of macro"),
        };
        ast::Markup::Element { name, attrs, body }
    }

    /// Parses the attributes of an element.
    fn attrs(&mut self) -> Vec<ast::Attr> {
        let mut attrs = Vec::new();
        loop {
            if let Some(name) = self.try_namespaced_name() {
                // Attribute
                match self.peek() {
                    // Non-empty attribute
                    Some(TokenTree::Punct(ref punct)) if punct.as_char() == '=' => {
                        self.advance();
                        // Parse a value under an attribute context
                        assert!(self.current_attr.is_none());
                        self.current_attr = Some(ast::name_to_string(name.clone()));
                        let attr_type = match self.attr_toggler() {
                            Some(toggler) => ast::AttrType::Optional { toggler },
                            None => {
                                let value = self.markup();
                                ast::AttrType::Normal { value }
                            }
                        };
                        self.current_attr = None;
                        attrs.push(ast::Attr::Named {
                            named_attr: ast::NamedAttr { name, attr_type },
                        });
                    }
                    // Empty attribute (legacy syntax)
                    Some(TokenTree::Punct(ref punct)) if punct.as_char() == '?' => {
                        self.advance();
                        let toggler = self.attr_toggler();
                        attrs.push(ast::Attr::Named {
                            named_attr: ast::NamedAttr {
                                name: name.clone(),
                                attr_type: ast::AttrType::Empty { toggler },
                            },
                        });
                    }
                    // Empty attribute (new syntax)
                    _ => {
                        let toggler = self.attr_toggler();
                        attrs.push(ast::Attr::Named {
                            named_attr: ast::NamedAttr {
                                name: name.clone(),
                                attr_type: ast::AttrType::Empty { toggler },
                            },
                        });
                    }
                }
            } else {
                match self.peek() {
                    // Class shorthand
                    Some(TokenTree::Punct(ref punct)) if punct.as_char() == '.' => {
                        self.advance();
                        let name = self.class_or_id_name();
                        let toggler = self.attr_toggler();
                        attrs.push(ast::Attr::Class {
                            dot_span: SpanRange::single_span(punct.span()),
                            name,
                            toggler,
                        });
                    }
                    // ID shorthand
                    Some(TokenTree::Punct(ref punct)) if punct.as_char() == '#' => {
                        self.advance();
                        let name = self.class_or_id_name();
                        attrs.push(ast::Attr::Id {
                            hash_span: SpanRange::single_span(punct.span()),
                            name,
                        });
                    }
                    // If it's not a valid attribute, backtrack and bail out
                    _ => break,
                }
            }
        }

        let mut attr_map: HashMap<String, Vec<SpanRange>> = HashMap::new();
        let mut has_class = false;
        for attr in &attrs {
            let name = match attr {
                ast::Attr::Class { .. } => {
                    if has_class {
                        // Only check the first class to avoid spurious duplicates
                        continue;
                    }
                    has_class = true;
                    "class".to_string()
                }
                ast::Attr::Id { .. } => "id".to_string(),
                ast::Attr::Named { named_attr } => named_attr
                    .name
                    .clone()
                    .into_iter()
                    .map(|token| token.to_string())
                    .collect(),
            };
            let entry = attr_map.entry(name).or_default();
            entry.push(attr.span());
        }

        for (name, spans) in attr_map {
            if spans.len() > 1 {
                let mut spans = spans.into_iter();
                let first_span = spans.next().expect("spans should be non-empty");
                abort!(first_span, "duplicate attribute `{}`", name);
            }
        }

        attrs
    }

    /// Parses the name of a class or ID.
    fn class_or_id_name(&mut self) -> ast::Markup {
        if let Some(symbol) = self.try_name() {
            ast::Markup::Symbol { symbol }
        } else {
            self.markup()
        }
    }

    /// Parses the `[cond]` syntax after an empty attribute or class shorthand.
    fn attr_toggler(&mut self) -> Option<ast::Toggler> {
        match self.peek() {
            Some(TokenTree::Group(ref group)) if group.delimiter() == Delimiter::Bracket => {
                self.advance();
                Some(ast::Toggler {
                    cond: group.stream(),
                    cond_span: SpanRange::single_span(group.span()),
                })
            }
            _ => None,
        }
    }

    /// Parses an identifier, without dealing with namespaces.
    fn try_name(&mut self) -> Option<TokenStream> {
        let mut result = Vec::new();
        if let Some(token @ TokenTree::Ident(_)) = self.peek() {
            self.advance();
            result.push(token);
        } else {
            return None;
        }
        let mut expect_ident = false;
        loop {
            expect_ident = match self.peek() {
                Some(TokenTree::Punct(ref punct)) if punct.as_char() == '-' => {
                    self.advance();
                    result.push(TokenTree::Punct(punct.clone()));
                    true
                }
                Some(TokenTree::Ident(ref ident)) if expect_ident => {
                    self.advance();
                    result.push(TokenTree::Ident(ident.clone()));
                    false
                }
                _ => break,
            };
        }
        Some(result.into_iter().collect())
    }

    /// Parses an HTML element or attribute name, along with a namespace
    /// if necessary.
    fn try_namespaced_name(&mut self) -> Option<TokenStream> {
        let mut result = vec![self.try_name()?];
        if let Some(TokenTree::Punct(ref punct)) = self.peek() {
            if punct.as_char() == ':' {
                self.advance();
                result.push(TokenStream::from(TokenTree::Punct(punct.clone())));
                result.push(self.try_name()?);
            }
        }
        Some(result.into_iter().collect())
    }

    /// Parses the given token stream as a Maud expression.
    fn block(&mut self, body: TokenStream, outer_span: SpanRange) -> ast::Block {
        let markups = self.with_input(body).markups();
        ast::Block {
            markups,
            outer_span,
        }
    }
}

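The branches above define the accepted template syntax. The following sketch exercises the main ones (`@let`, `@for`, `@match`, class/ID shorthands, `[cond]` togglers, and void elements); it assumes the `html!` macro and a `Markup` type are reachable through the parent `pagetop` crate, for example via its prelude.

```rust
// Template syntax exercised by the parser above; assumes `html!` and `Markup` are
// re-exported by the parent `pagetop` crate (an assumption, not part of this commit).
use pagetop::prelude::*;

fn list(items: &[&str], highlight: Option<usize>) -> Markup {
    html! {
        h2 #title { "Items" }                 // `#` ID shorthand
        hr;                                   // void element, terminated with `;`
        @let total = items.len();             // `@let` binding inside a block
        ul {
            @for (i, item) in items.iter().enumerate() {
                // `.selected[cond]` is a toggled class handled by `attr_toggler`
                li .item .selected[highlight == Some(i)] { (item) }
            }
        }
        @match total {
            0 => p { "No items" },
            1 => p { "One item" },
            n => p { (n) " items" },
        }
    }
}
```
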
helpers/pagetop-macros/src/smart_default.rs · new file · 4 additions
@@ -0,0 +1,4 @@
pub mod body_impl;

mod default_attr;
mod util;

helpers/pagetop-macros/src/smart_default/body_impl.rs · new file · 158 additions
@@ -0,0 +1,158 @@
use proc_macro2::TokenStream;

use quote::quote;
use syn::parse::Error;
use syn::spanned::Spanned;
use syn::DeriveInput;

use crate::smart_default::default_attr::{ConversionStrategy, DefaultAttr};
use crate::smart_default::util::find_only;

pub fn impl_my_derive(input: &DeriveInput) -> Result<TokenStream, Error> {
    let name = &input.ident;
    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();

    let (default_expr, doc) = match input.data {
        syn::Data::Struct(ref body) => {
            let (body_assignment, _doc) = default_body_tt(&body.fields)?;
            (
                quote! {
                    #name #body_assignment
                },
                format!("Returns a `{}` default.", name),
            )
        }
        syn::Data::Enum(ref body) => {
            let default_variant = find_only(body.variants.iter(), |variant| {
                if let Some(meta) = DefaultAttr::find_in_attributes(&variant.attrs)? {
                    if meta.code.is_none() {
                        Ok(true)
                    } else {
                        Err(Error::new(
                            meta.code.span(),
                            "Attribute #[default] on variants should have no value",
                        ))
                    }
                } else {
                    Ok(false)
                }
            })?
            .ok_or_else(|| Error::new(input.span(), "No default variant"))?;
            let default_variant_name = &default_variant.ident;
            let (body_assignment, _doc) = default_body_tt(&default_variant.fields)?;
            (
                quote! {
                    #name :: #default_variant_name #body_assignment
                },
                format!("Returns a `{}::{}` default.", name, default_variant_name),
            )
        }
        syn::Data::Union(_) => {
            panic!()
        }
    };
    Ok(quote! {
        #[automatically_derived]
        impl #impl_generics Default for #name #ty_generics #where_clause {
            #[doc = #doc]
            fn default() -> Self {
                #default_expr
            }
        }
    })
}

/// Return a token-tree for the default "body" - the part after the name that contains the values.
/// That is, the `{ ... }` part for structs, the `(...)` part for tuples, and nothing for units.
fn default_body_tt(body: &syn::Fields) -> Result<(TokenStream, String), Error> {
    let mut doc = String::new();
    use std::fmt::Write;
    let body_tt = match body {
        syn::Fields::Named(ref fields) => {
            doc.push_str(" {");
            let result = {
                let field_assignments = fields
                    .named
                    .iter()
                    .map(|field| {
                        let field_name = field.ident.as_ref();
                        let (default_value, default_doc) = field_default_expr_and_doc(field)?;
                        write!(
                            &mut doc,
                            "\n {}: {},",
                            field_name.expect("field value in struct is empty"),
                            default_doc
                        )
                        .unwrap();
                        // let default_value = default_value.into_token_stream();
                        Ok(quote! { #field_name : #default_value })
                    })
                    .collect::<Result<Vec<_>, Error>>()?;
                quote! {
                    {
                        #( #field_assignments ),*
                    }
                }
            };
            if doc.ends_with(',') {
                doc.pop();
                doc.push('\n');
            };
            doc.push('}');
            result
        }
        syn::Fields::Unnamed(ref fields) => {
            doc.push('(');
            let result = {
                let field_assignments = fields
                    .unnamed
                    .iter()
                    .map(|field| {
                        let (default_value, default_doc) = field_default_expr_and_doc(field)?;
                        write!(&mut doc, "{}, ", default_doc).unwrap();
                        Ok(default_value)
                    })
                    .collect::<Result<Vec<TokenStream>, Error>>()?;
                quote! {
                    (
                        #( #field_assignments ),*
                    )
                }
            };
            if doc.ends_with(", ") {
                doc.pop();
                doc.pop();
            };
            doc.push(')');
            result
        }
        &syn::Fields::Unit => quote! {},
    };
    Ok((body_tt, doc))
}

/// Return a default expression for a field based on its `#[default = "..."]` attribute. Panic
/// if there is more than one, or if there is a `#[default]` attribute without a value.
fn field_default_expr_and_doc(field: &syn::Field) -> Result<(TokenStream, String), Error> {
    if let Some(default_attr) = DefaultAttr::find_in_attributes(&field.attrs)? {
        let conversion_strategy = default_attr.conversion_strategy();
        let field_value = default_attr.code.ok_or_else(|| {
            Error::new(field.span(), "Expected #[default = ...] or #[default(...)]")
        })?;

        let field_value = match conversion_strategy {
            ConversionStrategy::NoConversion => field_value,
            ConversionStrategy::Into => quote!((#field_value).into()),
        };

        let field_doc = format!("{}", field_value);
        Ok((field_value, field_doc))
    } else {
        Ok((
            quote! {
                Default::default()
            },
            "Default::default()".to_owned(),
        ))
    }
}

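To show the intent of `impl_my_derive`, here is the `Default` impl it aims to produce for a small struct, written out by hand: attributed fields take their `#[default(...)]` value, string literals go through `.into()` (the `ConversionStrategy::Into` path in `default_attr.rs`), and the rest fall back to `Default::default()`. Illustrative only, not actual derive output.

```rust
// Hand-written equivalent of `#[derive(AutoDefault)]` for this struct.
struct Page {
    title: String, // would carry #[default = "Untitled"]
    weight: isize, // would carry #[default(10)]
    hidden: bool,  // no attribute
}

impl Default for Page {
    /// Returns a `Page` default.
    fn default() -> Self {
        Page {
            // String literal: converted through `.into()` so it becomes a `String`.
            title: ("Untitled").into(),
            // Plain expression: used as-is (no conversion).
            weight: 10,
            // Unattributed field: falls back to `Default::default()`.
            hidden: Default::default(),
        }
    }
}

fn main() {
    let page = Page::default();
    assert_eq!(page.title, "Untitled");
    assert_eq!(page.weight, 10);
    assert!(!page.hidden);
}
```
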
helpers/pagetop-macros/src/smart_default/default_attr.rs · new file · 89 additions
@@ -0,0 +1,89 @@
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::{parse::Error, MetaNameValue};

use crate::smart_default::util::find_only;

#[derive(Debug, Clone, Copy)]
pub enum ConversionStrategy {
    NoConversion,
    Into,
}

pub struct DefaultAttr {
    pub code: Option<TokenStream>,
    conversion_strategy: Option<ConversionStrategy>,
}

impl DefaultAttr {
    pub fn find_in_attributes(attrs: &[syn::Attribute]) -> Result<Option<Self>, Error> {
        if let Some(default_attr) =
            find_only(attrs.iter(), |attr| Ok(attr.path().is_ident("default")))?
        {
            match &default_attr.meta {
                syn::Meta::Path(_) => Ok(Some(Self {
                    code: None,
                    conversion_strategy: None,
                })),
                syn::Meta::List(meta) => {
                    // If the meta contains exactly (_code = "...") take the string literal as the
                    // expression
                    if let Ok(ParseCodeHack(code_hack)) = syn::parse(meta.tokens.clone().into()) {
                        Ok(Some(Self {
                            code: Some(code_hack),
                            conversion_strategy: Some(ConversionStrategy::NoConversion),
                        }))
                    } else {
                        Ok(Some(Self {
                            code: Some(meta.tokens.clone()),
                            conversion_strategy: None,
                        }))
                    }
                }
                syn::Meta::NameValue(MetaNameValue { value, .. }) => Ok(Some(Self {
                    code: Some(value.into_token_stream()),
                    conversion_strategy: None,
                })),
            }
        } else {
            Ok(None)
        }
    }

    pub fn conversion_strategy(&self) -> ConversionStrategy {
        if let Some(conversion_strategy) = self.conversion_strategy {
            // Conversion strategy already set
            return conversion_strategy;
        }
        let code = if let Some(code) = &self.code {
            code
        } else {
            // #[default] - so no conversion (`Default::default()` already has the correct type)
            return ConversionStrategy::NoConversion;
        };
        match syn::parse::<syn::Lit>(code.clone().into()) {
            Ok(syn::Lit::Str(_)) | Ok(syn::Lit::ByteStr(_)) => {
                // A string literal - so we need a conversion in case we need to make it a `String`
                return ConversionStrategy::Into;
            }
            _ => {}
        }
        // Not handled by one of the rules, so we don't convert it to avoid causing trouble
        ConversionStrategy::NoConversion
    }
}

struct ParseCodeHack(TokenStream);

impl syn::parse::Parse for ParseCodeHack {
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        let ident: syn::Ident = input.parse()?;
        if ident != "_code" {
            return Err(Error::new(ident.span(), "Expected `_code`"));
        }
        input.parse::<syn::token::Eq>()?;
        let code: syn::LitStr = input.parse()?;
        let code: TokenStream = code.parse()?;
        Ok(ParseCodeHack(code))
    }
}

helpers/pagetop-macros/src/smart_default/util.rs · new file · 21 additions
@@ -0,0 +1,21 @@
use syn::parse::Error;
use syn::spanned::Spanned;

/// Return the value that fulfills the predicate if there is one in the slice. Panic if there is
/// more than one.
pub fn find_only<T, F>(iter: impl Iterator<Item = T>, pred: F) -> Result<Option<T>, Error>
where
    T: Spanned,
    F: Fn(&T) -> Result<bool, Error>,
{
    let mut result = None;
    for item in iter {
        if pred(&item)? {
            if result.is_some() {
                return Err(Error::new(item.span(), "Multiple defaults"));
            }
            result = Some(item);
        }
    }
    Ok(result)
}