From bceb43e6d0997e857e42fea254af1dde6c117d2c Mon Sep 17 00:00:00 2001 From: Manuel Cillero Date: Mon, 7 Jul 2025 20:31:00 +0200 Subject: [PATCH] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Actualiza=20el=20c=C3=B3di?= =?UTF-8?q?go=20de=20maud=20a=20la=20versi=C3=B3n=200.27.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cargo.lock | 82 +- helpers/pagetop-macros/Cargo.toml | 4 +- helpers/pagetop-macros/src/lib.rs | 3 +- helpers/pagetop-macros/src/maud.rs | 53 +- helpers/pagetop-macros/src/maud/ast.rs | 1222 ++++++++++++++++--- helpers/pagetop-macros/src/maud/escape.rs | 7 - helpers/pagetop-macros/src/maud/generate.rs | 478 +++++--- helpers/pagetop-macros/src/maud/parse.rs | 752 ------------ src/html.rs | 2 +- src/html/maud.rs | 75 +- src/service.rs | 2 +- 11 files changed, 1459 insertions(+), 1221 deletions(-) delete mode 100644 helpers/pagetop-macros/src/maud/parse.rs diff --git a/Cargo.lock b/Cargo.lock index d53ffd7..cf974c8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -65,7 +65,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -182,7 +182,7 @@ dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -426,7 +426,7 @@ checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", "unicode-xid", ] @@ -448,7 +448,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -937,13 +937,13 @@ dependencies = [ [[package]] name = "pagetop-macros" -version = "0.0.1" +version = "0.0.2" dependencies = [ "proc-macro-crate", - "proc-macro-error", "proc-macro2", + "proc-macro2-diagnostics", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -998,7 +998,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -1052,30 +1052,6 @@ dependencies = [ "toml_edit", ] -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - [[package]] name = "proc-macro2" version = "1.0.95" @@ -1085,6 +1061,18 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "proc-macro2-diagnostics" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "version_check", +] + [[package]] name = "quote" version = "1.0.40" @@ -1242,7 +1230,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -1350,16 +1338,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "syn" -version = "1.0.109" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "unicode-ident", -] - [[package]] name = "syn" version = "2.0.104" @@ -1379,7 +1357,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -1409,7 +1387,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -1571,7 +1549,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -1721,7 +1699,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.104", + "syn", "wasm-bindgen-shared", ] @@ -1743,7 +1721,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -1905,7 +1883,7 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", "synstructure", ] @@ -1926,7 +1904,7 @@ checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] @@ -1946,7 +1924,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", "synstructure", ] @@ -1980,7 +1958,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn", ] [[package]] diff --git a/helpers/pagetop-macros/Cargo.toml b/helpers/pagetop-macros/Cargo.toml index 8041382..ea6404b 100644 --- a/helpers/pagetop-macros/Cargo.toml +++ b/helpers/pagetop-macros/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pagetop-macros" -version = "0.0.1" +version = "0.0.2" edition = "2021" description = """\ @@ -19,7 +19,7 @@ proc-macro = true [dependencies] proc-macro2 = "1.0.95" +proc-macro2-diagnostics = { version = "0.10.1", default-features = false } proc-macro-crate = "3.3.0" -proc-macro-error = "1.0.4" quote = "1.0.40" syn = { version = "2.0.104", features = ["full"] } diff --git a/helpers/pagetop-macros/src/lib.rs b/helpers/pagetop-macros/src/lib.rs index 65badb0..e0f3bad 100644 --- a/helpers/pagetop-macros/src/lib.rs +++ b/helpers/pagetop-macros/src/lib.rs @@ -17,11 +17,10 @@ mod maud; use proc_macro::TokenStream; -use proc_macro_error::proc_macro_error; use quote::quote; +/// Macro para escribir plantillas HTML ([Maud](https://docs.rs/maud)). 
#[proc_macro] -#[proc_macro_error] pub fn html(input: TokenStream) -> TokenStream { maud::expand(input.into()).into() } diff --git a/helpers/pagetop-macros/src/maud.rs b/helpers/pagetop-macros/src/maud.rs index a4e7873..9077dbb 100644 --- a/helpers/pagetop-macros/src/maud.rs +++ b/helpers/pagetop-macros/src/maud.rs @@ -1,4 +1,4 @@ -// #![doc(html_root_url = "https://docs.rs/maud_macros/0.25.0")] +// #![doc(html_root_url = "https://docs.rs/maud_macros/0.27.0")] // TokenStream values are reference counted, and the mental overhead of tracking // lifetimes outweighs the marginal gains from explicit borrowing // #![allow(clippy::needless_pass_by_value)] @@ -6,34 +6,55 @@ mod ast; mod escape; mod generate; -mod parse; -use proc_macro2::{Ident, Span, TokenStream, TokenTree}; +use ast::DiagnosticParse; +use proc_macro2::{Ident, Span, TokenStream}; +use proc_macro2_diagnostics::Diagnostic; use proc_macro_crate::{crate_name, FoundCrate}; use quote::quote; +use syn::parse::{ParseStream, Parser}; pub fn expand(input: TokenStream) -> TokenStream { - let output_ident = TokenTree::Ident(Ident::new("__maud_output", Span::mixed_site())); // Heuristic: the size of the resulting markup tends to correlate with the // code size of the template itself let size_hint = input.to_string().len(); - let markups = parse::parse(input); - let stmts = generate::generate(markups, output_ident.clone()); - let found_crate = crate_name("pagetop").expect("pagetop is present in `Cargo.toml`"); - let pre_escaped = match found_crate { - FoundCrate::Itself => quote!( - crate::html::PreEscaped(#output_ident) - ), - _ => quote!( - pagetop::html::PreEscaped(#output_ident) - ), + let mut diagnostics = Vec::new(); + let markups = match Parser::parse2( + |input: ParseStream| ast::Markups::diagnostic_parse(input, &mut diagnostics), + input, + ) { + Ok(data) => data, + Err(err) => { + let err = err.to_compile_error(); + let diag_tokens = diagnostics.into_iter().map(Diagnostic::emit_as_expr_tokens); + + return quote! {{ + #err + #(#diag_tokens)* + }}; + } }; - quote!({ + let diag_tokens = diagnostics.into_iter().map(Diagnostic::emit_as_expr_tokens); + + let output_ident = Ident::new("__maud_output", Span::mixed_site()); + let stmts = generate::generate(markups, output_ident.clone()); + + let found_crate = crate_name("pagetop").expect("pagetop must be in Cargo.toml"); + let crate_ident = match found_crate { + FoundCrate::Itself => Ident::new("pagetop", Span::call_site()), + FoundCrate::Name(ref name) => Ident::new(name, Span::call_site()), + }; + let pre_escaped = quote! { + #crate_ident::html::PreEscaped(#output_ident) + }; + + quote! {{ extern crate alloc; let mut #output_ident = alloc::string::String::with_capacity(#size_hint); #stmts + #(#diag_tokens)* #pre_escaped - }) + }} } diff --git a/helpers/pagetop-macros/src/maud/ast.rs b/helpers/pagetop-macros/src/maud/ast.rs index cd8a2ce..fd499ae 100644 --- a/helpers/pagetop-macros/src/maud/ast.rs +++ b/helpers/pagetop-macros/src/maud/ast.rs @@ -1,221 +1,1105 @@ -use proc_macro2::{TokenStream, TokenTree}; -use proc_macro_error::SpanRange; +use std::fmt::{self, Display, Formatter}; -#[derive(Debug)] -pub enum Markup { - /// Used as a placeholder value on parse error. 
- ParseError { - span: SpanRange, - }, - Block(Block), - Literal { - content: String, - span: SpanRange, - }, - Symbol { - symbol: TokenStream, - }, - Splice { - expr: TokenStream, - outer_span: SpanRange, - }, - Element { - name: TokenStream, - attrs: Vec, - body: ElementBody, - }, - Let { - at_span: SpanRange, - tokens: TokenStream, - }, - Special { - segments: Vec, - }, - Match { - at_span: SpanRange, - head: TokenStream, - arms: Vec, - arms_span: SpanRange, +use proc_macro2::TokenStream; +use proc_macro2_diagnostics::{Diagnostic, SpanDiagnosticExt}; +use quote::ToTokens; +use syn::{ + braced, bracketed, + ext::IdentExt, + parenthesized, + parse::{Lookahead1, Parse, ParseStream}, + punctuated::{Pair, Punctuated}, + spanned::Spanned, + token::{ + At, Brace, Bracket, Colon, Comma, Dot, Else, Eq, FatArrow, For, If, In, Let, Match, Minus, + Paren, Pound, Question, Semi, Slash, While, }, + Error, Expr, Ident, Lit, LitBool, LitInt, LitStr, Local, Pat, Stmt, +}; + +#[derive(Debug, Clone)] +pub struct Markups { + pub markups: Vec>, } -impl Markup { - pub fn span(&self) -> SpanRange { - match *self { - Markup::ParseError { span } => span, - Markup::Block(ref block) => block.span(), - Markup::Literal { span, .. } => span, - Markup::Symbol { ref symbol } => span_tokens(symbol.clone()), - Markup::Splice { outer_span, .. } => outer_span, - Markup::Element { - ref name, ref body, .. - } => { - let name_span = span_tokens(name.clone()); - name_span.join_range(body.span()) - } - Markup::Let { - at_span, - ref tokens, - } => at_span.join_range(span_tokens(tokens.clone())), - Markup::Special { ref segments } => join_ranges(segments.iter().map(Special::span)), - Markup::Match { - at_span, arms_span, .. - } => at_span.join_range(arms_span), +impl DiagnosticParse for Markups { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let mut markups = Vec::new(); + while !input.is_empty() { + markups.push(Markup::diagnostic_parse_in_block(input, diagnostics)?) 
+ } + Ok(Self { markups }) + } +} + +impl ToTokens for Markups { + fn to_tokens(&self, tokens: &mut TokenStream) { + for markup in &self.markups { + markup.to_tokens(tokens); } } } -#[derive(Debug)] -pub enum Attr { +#[derive(Debug, Clone)] +pub enum Markup { + Block(Block), + Lit(HtmlLit), + Splice { paren_token: Paren, expr: Expr }, + Element(E), + ControlFlow(ControlFlow), + Semi(Semi), +} + +impl Markup { + pub fn diagnostic_parse_in_block( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + if input.peek(Let) + || input.peek(If) + || input.peek(Else) + || input.peek(For) + || input.peek(While) + || input.peek(Match) + { + let kw = input.call(Ident::parse_any)?; + diagnostics.push( + kw.span() + .error(format!("found keyword `{kw}`")) + .help(format!("should this be `@{kw}`?")), + ); + } + + let lookahead = input.lookahead1(); + + if lookahead.peek(Brace) { + input.diagnostic_parse(diagnostics).map(Self::Block) + } else if lookahead.peek(Lit) { + input.diagnostic_parse(diagnostics).map(Self::Lit) + } else if lookahead.peek(Paren) { + let content; + Ok(Self::Splice { + paren_token: parenthesized!(content in input), + expr: content.parse()?, + }) + } else if let Some(parse_element) = E::should_parse(&lookahead) { + parse_element(input, diagnostics).map(Self::Element) + } else if lookahead.peek(At) { + input.diagnostic_parse(diagnostics).map(Self::ControlFlow) + } else if lookahead.peek(Semi) { + input.parse().map(Self::Semi) + } else { + Err(lookahead.error()) + } + } +} + +impl DiagnosticParse for Markup { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let markup = Self::diagnostic_parse_in_block(input, diagnostics)?; + + if let Self::ControlFlow(ControlFlow { + kind: ControlFlowKind::Let(_), + .. + }) = &markup + { + diagnostics.push( + markup + .span() + .error("`@let` bindings are only allowed inside blocks"), + ) + } + + Ok(markup) + } +} + +impl ToTokens for Markup { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Block(block) => block.to_tokens(tokens), + Self::Lit(lit) => lit.to_tokens(tokens), + Self::Splice { paren_token, expr } => { + paren_token.surround(tokens, |tokens| { + expr.to_tokens(tokens); + }); + } + Self::Element(element) => element.to_tokens(tokens), + Self::ControlFlow(control_flow) => control_flow.to_tokens(tokens), + Self::Semi(semi) => semi.to_tokens(tokens), + } + } +} + +/// Represents a context that may or may not allow elements. +/// +/// An attribute accepts almost the same syntax as an element body, except child elements aren't +/// allowed. To enable code reuse, introduce a trait that abstracts over whether an element is +/// allowed or not. +pub trait MaybeElement: Sized + ToTokens { + /// If an element can be parsed here, returns `Some` with a parser for the rest of the element. + fn should_parse(lookahead: &Lookahead1<'_>) -> Option>; +} + +/// An implementation of `DiagnosticParse::diagnostic_parse`. +pub type DiagnosticParseFn = fn(ParseStream, &mut Vec) -> syn::Result; + +/// Represents an attribute context, where elements are disallowed. 
+#[derive(Debug, Clone)] +pub enum NoElement {} + +impl MaybeElement for NoElement { + fn should_parse( + _lookahead: &Lookahead1<'_>, + ) -> Option) -> syn::Result> { + None + } +} + +impl ToTokens for NoElement { + fn to_tokens(&self, _tokens: &mut TokenStream) { + match *self {} + } +} + +#[derive(Debug, Clone)] +pub struct Element { + pub name: Option, + pub attrs: Vec, + pub body: ElementBody, +} + +impl From for Element { + fn from(value: NoElement) -> Self { + match value {} + } +} + +impl MaybeElement for Element { + fn should_parse( + lookahead: &Lookahead1<'_>, + ) -> Option) -> syn::Result> { + if lookahead.peek(Ident::peek_any) || lookahead.peek(Dot) || lookahead.peek(Pound) { + Some(Element::diagnostic_parse) + } else { + None + } + } +} + +impl DiagnosticParse for Element { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + Ok(Self { + name: if input.peek(Ident::peek_any) { + Some(input.diagnostic_parse(diagnostics)?) + } else { + None + }, + attrs: { + let mut id_pushed = false; + let mut attrs = Vec::new(); + + while input.peek(Ident::peek_any) + || input.peek(Lit) + || input.peek(Dot) + || input.peek(Pound) + { + let attr = input.diagnostic_parse(diagnostics)?; + + if let Attribute::Id { .. } = attr { + if id_pushed { + return Err(Error::new_spanned( + attr, + "duplicate id (`#`) attribute specified", + )); + } + id_pushed = true; + } + + attrs.push(attr); + } + + if !(input.peek(Brace) || input.peek(Semi) || input.peek(Slash)) { + let lookahead = input.lookahead1(); + + lookahead.peek(Ident::peek_any); + lookahead.peek(Lit); + lookahead.peek(Dot); + lookahead.peek(Pound); + + lookahead.peek(Brace); + lookahead.peek(Semi); + + return Err(lookahead.error()); + } + + attrs + }, + body: input.diagnostic_parse(diagnostics)?, + }) + } +} + +impl ToTokens for Element { + fn to_tokens(&self, tokens: &mut TokenStream) { + if let Some(name) = &self.name { + name.to_tokens(tokens); + } + for attr in &self.attrs { + attr.to_tokens(tokens); + } + self.body.to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub enum ElementBody { + Void(Semi), + Block(Block), +} + +impl DiagnosticParse for ElementBody { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Semi) { + input.parse().map(Self::Void) + } else if lookahead.peek(Brace) { + input.diagnostic_parse(diagnostics).map(Self::Block) + } else if lookahead.peek(Slash) { + diagnostics.push( + input + .parse::()? 
+ .span() + .error("void elements must use `;`, not `/`") + .help("change this to `;`") + .help("see https://github.com/lambda-fairy/maud/pull/315 for details"), + ); + + Ok(Self::Void(::default())) + } else { + Err(lookahead.error()) + } + } +} + +impl ToTokens for ElementBody { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Void(semi) => semi.to_tokens(tokens), + Self::Block(block) => block.to_tokens(tokens), + } + } +} + +#[derive(Debug, Clone)] +pub struct Block { + pub brace_token: Brace, + pub markups: Markups, +} + +impl DiagnosticParse for Block { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let content; + Ok(Self { + brace_token: braced!(content in input), + markups: content.diagnostic_parse(diagnostics)?, + }) + } +} + +impl ToTokens for Block { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.brace_token.surround(tokens, |tokens| { + self.markups.to_tokens(tokens); + }); + } +} + +#[derive(Debug, Clone)] +pub enum Attribute { Class { - dot_span: SpanRange, - name: Markup, + dot_token: Dot, + name: HtmlNameOrMarkup, toggler: Option, }, Id { - hash_span: SpanRange, - name: Markup, + pound_token: Pound, + name: HtmlNameOrMarkup, }, Named { - named_attr: NamedAttr, + name: HtmlName, + attr_type: AttributeType, }, } -impl Attr { - pub fn span(&self) -> SpanRange { - match *self { - Attr::Class { - dot_span, - ref name, - ref toggler, +impl DiagnosticParse for Attribute { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Dot) { + Ok(Self::Class { + dot_token: input.parse()?, + name: input.diagnostic_parse(diagnostics)?, + toggler: { + let lookahead = input.lookahead1(); + + if lookahead.peek(Bracket) { + Some(input.diagnostic_parse(diagnostics)?) 
+ } else { + None + } + }, + }) + } else if lookahead.peek(Pound) { + Ok(Self::Id { + pound_token: input.parse()?, + name: input.diagnostic_parse(diagnostics)?, + }) + } else { + let name = input.diagnostic_parse::(diagnostics)?; + + if input.peek(Question) { + input.parse::()?; + } + + let fork = input.fork(); + + let attr = Self::Named { + name: name.clone(), + attr_type: input.diagnostic_parse(diagnostics)?, + }; + + if fork.peek(Eq) && fork.peek2(LitBool) { + diagnostics.push( + attr.span() + .error("attribute value must be a string") + .help(format!("to declare an empty attribute, omit the equals sign: `{name}`")) + .help(format!("to toggle the attribute, use square brackets: `{name}[some_boolean_flag]`")) + ); + } + + Ok(attr) + } + } +} + +impl ToTokens for Attribute { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Class { + dot_token, + name, + toggler, } => { - let name_span = name.span(); - let dot_name_span = dot_span.join_range(name_span); + dot_token.to_tokens(tokens); + name.to_tokens(tokens); if let Some(toggler) = toggler { - dot_name_span.join_range(toggler.cond_span) - } else { - dot_name_span + toggler.to_tokens(tokens); } } - Attr::Id { - hash_span, - ref name, - } => { - let name_span = name.span(); - hash_span.join_range(name_span) + Self::Id { pound_token, name } => { + pound_token.to_tokens(tokens); + name.to_tokens(tokens); + } + Self::Named { name, attr_type } => { + name.to_tokens(tokens); + attr_type.to_tokens(tokens); } - Attr::Named { ref named_attr } => named_attr.span(), } } } -#[derive(Debug)] -pub enum ElementBody { - Void { semi_span: SpanRange }, - Block { block: Block }, +#[derive(Debug, Clone)] +pub enum HtmlNameOrMarkup { + HtmlName(HtmlName), + Markup(Markup), } -impl ElementBody { - pub fn span(&self) -> SpanRange { - match *self { - ElementBody::Void { semi_span } => semi_span, - ElementBody::Block { ref block } => block.span(), - } - } -} - -#[derive(Debug)] -pub struct Block { - pub markups: Vec, - pub outer_span: SpanRange, -} - -impl Block { - pub fn span(&self) -> SpanRange { - self.outer_span - } -} - -#[derive(Debug)] -pub struct Special { - pub at_span: SpanRange, - pub head: TokenStream, - pub body: Block, -} - -impl Special { - pub fn span(&self) -> SpanRange { - let body_span = self.body.span(); - self.at_span.join_range(body_span) - } -} - -#[derive(Debug)] -pub struct NamedAttr { - pub name: TokenStream, - pub attr_type: AttrType, -} - -impl NamedAttr { - fn span(&self) -> SpanRange { - let name_span = span_tokens(self.name.clone()); - if let Some(attr_type_span) = self.attr_type.span() { - name_span.join_range(attr_type_span) +impl DiagnosticParse for HtmlNameOrMarkup { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + if input.peek(Ident::peek_any) || input.peek(Lit) { + input.diagnostic_parse(diagnostics).map(Self::HtmlName) } else { - name_span + input.diagnostic_parse(diagnostics).map(Self::Markup) } } } -#[derive(Debug)] -pub enum AttrType { - Normal { value: Markup }, - Optional { toggler: Toggler }, - Empty { toggler: Option }, +impl Parse for HtmlNameOrMarkup { + fn parse(input: ParseStream) -> syn::Result { + Self::diagnostic_parse(input, &mut Vec::new()) + } } -impl AttrType { - fn span(&self) -> Option { - match *self { - AttrType::Normal { ref value } => Some(value.span()), - AttrType::Optional { ref toggler } => Some(toggler.span()), - AttrType::Empty { ref toggler } => toggler.as_ref().map(Toggler::span), +impl ToTokens for HtmlNameOrMarkup { + fn 
to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::HtmlName(name) => name.to_tokens(tokens), + Self::Markup(markup) => markup.to_tokens(tokens), } } } -#[derive(Debug)] +impl Display for HtmlNameOrMarkup { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match self { + Self::HtmlName(name) => name.fmt(f), + Self::Markup(markup) => markup.to_token_stream().fmt(f), + } + } +} + +#[derive(Debug, Clone)] +pub enum AttributeType { + Normal { + eq_token: Eq, + value: Markup, + }, + Optional { + eq_token: Eq, + toggler: Toggler, + }, + Empty(Option), +} + +impl DiagnosticParse for AttributeType { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Eq) { + let eq_token = input.parse()?; + + if input.peek(Bracket) { + Ok(Self::Optional { + eq_token, + toggler: input.diagnostic_parse(diagnostics)?, + }) + } else { + Ok(Self::Normal { + eq_token, + value: input.diagnostic_parse(diagnostics)?, + }) + } + } else if lookahead.peek(Bracket) { + Ok(Self::Empty(Some(input.diagnostic_parse(diagnostics)?))) + } else { + Ok(Self::Empty(None)) + } + } +} + +impl ToTokens for AttributeType { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Normal { eq_token, value } => { + eq_token.to_tokens(tokens); + value.to_tokens(tokens); + } + Self::Optional { eq_token, toggler } => { + eq_token.to_tokens(tokens); + toggler.to_tokens(tokens); + } + Self::Empty(toggler) => { + if let Some(toggler) = toggler { + toggler.to_tokens(tokens); + } + } + } + } +} + +#[derive(Debug, Clone)] +pub struct HtmlName { + pub name: Punctuated, +} + +impl DiagnosticParse for HtmlName { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + Ok(Self { + name: { + let mut punctuated = Punctuated::new(); + + loop { + punctuated.push_value(input.diagnostic_parse(diagnostics)?); + + if !(input.peek(Minus) || input.peek(Colon)) { + break; + } + + let punct = input.diagnostic_parse(diagnostics)?; + punctuated.push_punct(punct); + } + + punctuated + }, + }) + } +} + +impl Parse for HtmlName { + fn parse(input: ParseStream) -> syn::Result { + Self::diagnostic_parse(input, &mut Vec::new()) + } +} + +impl ToTokens for HtmlName { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.name.to_tokens(tokens); + } +} + +impl Display for HtmlName { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + for pair in self.name.pairs() { + match pair { + Pair::Punctuated(fragment, punct) => { + fragment.fmt(f)?; + punct.fmt(f)?; + } + Pair::End(fragment) => { + fragment.fmt(f)?; + } + } + } + + Ok(()) + } +} + +#[derive(Debug, Clone)] +pub enum HtmlNameFragment { + Ident(Ident), + LitInt(LitInt), + LitStr(LitStr), + Empty, +} + +impl DiagnosticParse for HtmlNameFragment { + fn diagnostic_parse( + input: ParseStream, + _diagnostics: &mut Vec, + ) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Ident::peek_any) { + input.call(Ident::parse_any).map(Self::Ident) + } else if lookahead.peek(LitInt) { + input.parse().map(Self::LitInt) + } else if lookahead.peek(LitStr) { + input.parse().map(Self::LitStr) + } else if lookahead.peek(Minus) || lookahead.peek(Colon) { + Ok(Self::Empty) + } else { + Err(lookahead.error()) + } + } +} + +impl ToTokens for HtmlNameFragment { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Ident(ident) => ident.to_tokens(tokens), + Self::LitInt(lit) => lit.to_tokens(tokens), + Self::LitStr(lit) => 
lit.to_tokens(tokens), + Self::Empty => {} + } + } +} + +impl Display for HtmlNameFragment { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match self { + Self::Ident(ident) => ident.fmt(f), + Self::LitInt(lit) => lit.fmt(f), + Self::LitStr(lit) => lit.value().fmt(f), + Self::Empty => Ok(()), + } + } +} + +#[derive(Debug, Clone)] +pub struct HtmlLit { + pub lit: LitStr, +} + +impl DiagnosticParse for HtmlLit { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Lit) { + let lit = input.parse()?; + match lit { + Lit::Str(lit) => Ok(Self { lit }), + Lit::Int(lit) => { + diagnostics.push( + lit.span() + .error(format!(r#"literal must be double-quoted: `"{lit}"`"#)), + ); + Ok(Self { + lit: LitStr::new("", lit.span()), + }) + } + Lit::Float(lit) => { + diagnostics.push( + lit.span() + .error(format!(r#"literal must be double-quoted: `"{lit}"`"#)), + ); + Ok(Self { + lit: LitStr::new("", lit.span()), + }) + } + Lit::Char(lit) => { + diagnostics.push(lit.span().error(format!( + r#"literal must be double-quoted: `"{}"`"#, + lit.value() + ))); + Ok(Self { + lit: LitStr::new("", lit.span()), + }) + } + Lit::Bool(_) => { + // diagnostic handled earlier with more information + Ok(Self { + lit: LitStr::new("", lit.span()), + }) + } + _ => { + diagnostics.push(lit.span().error("expected string")); + Ok(Self { + lit: LitStr::new("", lit.span()), + }) + } + } + } else { + Err(lookahead.error()) + } + } +} + +impl ToTokens for HtmlLit { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lit.to_tokens(tokens); + } +} + +impl Display for HtmlLit { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + self.lit.value().fmt(f) + } +} + +#[derive(Debug, Clone)] +pub enum HtmlNamePunct { + Colon(Colon), + Hyphen(Minus), +} + +impl DiagnosticParse for HtmlNamePunct { + fn diagnostic_parse(input: ParseStream, _: &mut Vec) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Colon) { + input.parse().map(Self::Colon) + } else if lookahead.peek(Minus) { + input.parse().map(Self::Hyphen) + } else { + Err(lookahead.error()) + } + } +} + +impl ToTokens for HtmlNamePunct { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Colon(token) => token.to_tokens(tokens), + Self::Hyphen(token) => token.to_tokens(tokens), + } + } +} + +impl Display for HtmlNamePunct { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match self { + Self::Colon(_) => f.write_str(":"), + Self::Hyphen(_) => f.write_str("-"), + } + } +} + +#[derive(Debug, Clone)] pub struct Toggler { - pub cond: TokenStream, - pub cond_span: SpanRange, + pub bracket_token: Bracket, + pub cond: Expr, } -impl Toggler { - fn span(&self) -> SpanRange { - self.cond_span +impl DiagnosticParse for Toggler { + fn diagnostic_parse(input: ParseStream, _: &mut Vec) -> syn::Result { + let content; + Ok(Self { + bracket_token: bracketed!(content in input), + cond: content.parse()?, + }) } } -#[derive(Debug)] -pub struct MatchArm { - pub head: TokenStream, - pub body: Block, +impl ToTokens for Toggler { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.bracket_token.surround(tokens, |tokens| { + self.cond.to_tokens(tokens); + }); + } } -pub fn span_tokens>(tokens: I) -> SpanRange { - join_ranges(tokens.into_iter().map(|s| SpanRange::single_span(s.span()))) +#[derive(Debug, Clone)] +pub struct ControlFlow { + pub at_token: At, + pub kind: ControlFlowKind, } -pub fn join_ranges>(ranges: I) -> SpanRange { - let mut 
iter = ranges.into_iter(); - let first = match iter.next() { - Some(span) => span, - None => return SpanRange::call_site(), - }; - let last = iter.last().unwrap_or(first); - first.join_range(last) +impl DiagnosticParse for ControlFlow { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + Ok(Self { + at_token: input.parse()?, + kind: { + let lookahead = input.lookahead1(); + + if lookahead.peek(If) { + ControlFlowKind::If(input.diagnostic_parse(diagnostics)?) + } else if lookahead.peek(For) { + ControlFlowKind::For(input.diagnostic_parse(diagnostics)?) + } else if lookahead.peek(While) { + ControlFlowKind::While(input.diagnostic_parse(diagnostics)?) + } else if lookahead.peek(Match) { + ControlFlowKind::Match(input.diagnostic_parse(diagnostics)?) + } else if lookahead.peek(Let) { + let Stmt::Local(local) = input.parse()? else { + unreachable!() + }; + + ControlFlowKind::Let(local) + } else { + return Err(lookahead.error()); + } + }, + }) + } } -pub fn name_to_string(name: TokenStream) -> String { - name.into_iter().map(|token| token.to_string()).collect() +impl ToTokens for ControlFlow { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.at_token.to_tokens(tokens); + match &self.kind { + ControlFlowKind::Let(local) => local.to_tokens(tokens), + ControlFlowKind::If(if_) => if_.to_tokens(tokens), + ControlFlowKind::For(for_) => for_.to_tokens(tokens), + ControlFlowKind::While(while_) => while_.to_tokens(tokens), + ControlFlowKind::Match(match_) => match_.to_tokens(tokens), + } + } +} + +#[derive(Debug, Clone)] +pub enum ControlFlowKind { + Let(Local), + If(IfExpr), + For(ForExpr), + While(WhileExpr), + Match(MatchExpr), +} + +#[derive(Debug, Clone)] +pub struct IfExpr { + pub if_token: If, + pub cond: Expr, + pub then_branch: Block, + pub else_branch: Option<(At, Else, Box>)>, +} + +impl DiagnosticParse for IfExpr { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + Ok(Self { + if_token: input.parse()?, + cond: input.call(Expr::parse_without_eager_brace)?, + then_branch: input.diagnostic_parse(diagnostics)?, + else_branch: { + if input.peek(At) && input.peek2(Else) { + Some(( + input.parse()?, + input.parse()?, + input.diagnostic_parse(diagnostics)?, + )) + } else { + None + } + }, + }) + } +} + +impl ToTokens for IfExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.if_token.to_tokens(tokens); + self.cond.to_tokens(tokens); + self.then_branch.to_tokens(tokens); + if let Some((at_token, else_token, else_branch)) = &self.else_branch { + at_token.to_tokens(tokens); + else_token.to_tokens(tokens); + else_branch.to_tokens(tokens); + } + } +} + +#[derive(Debug, Clone)] +pub enum IfOrBlock { + If(IfExpr), + Block(Block), +} + +impl DiagnosticParse for IfOrBlock { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(If) { + input.diagnostic_parse(diagnostics).map(Self::If) + } else if lookahead.peek(Brace) { + input.diagnostic_parse(diagnostics).map(Self::Block) + } else { + Err(lookahead.error()) + } + } +} + +impl ToTokens for IfOrBlock { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::If(if_) => if_.to_tokens(tokens), + Self::Block(block) => block.to_tokens(tokens), + } + } +} + +#[derive(Debug, Clone)] +pub struct ForExpr { + pub for_token: For, + pub pat: Pat, + pub in_token: In, + pub expr: Expr, + pub body: Block, +} + +impl DiagnosticParse for ForExpr { + fn 
diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + Ok(Self { + for_token: input.parse()?, + pat: input.call(Pat::parse_multi_with_leading_vert)?, + in_token: input.parse()?, + expr: input.call(Expr::parse_without_eager_brace)?, + body: input.diagnostic_parse(diagnostics)?, + }) + } +} + +impl ToTokens for ForExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.for_token.to_tokens(tokens); + self.pat.to_tokens(tokens); + self.in_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + self.body.to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub struct WhileExpr { + pub while_token: While, + pub cond: Expr, + pub body: Block, +} + +impl DiagnosticParse for WhileExpr { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + Ok(Self { + while_token: input.parse()?, + cond: input.call(Expr::parse_without_eager_brace)?, + body: input.diagnostic_parse(diagnostics)?, + }) + } +} + +impl ToTokens for WhileExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.while_token.to_tokens(tokens); + self.cond.to_tokens(tokens); + self.body.to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub struct MatchExpr { + pub match_token: Match, + pub expr: Expr, + pub brace_token: Brace, + pub arms: Vec>, +} + +impl DiagnosticParse for MatchExpr { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + let match_token = input.parse()?; + let expr = input.call(Expr::parse_without_eager_brace)?; + + let content; + let brace_token = braced!(content in input); + + let mut arms = Vec::new(); + while !content.is_empty() { + arms.push(content.diagnostic_parse(diagnostics)?); + } + + Ok(Self { + match_token, + expr, + brace_token, + arms, + }) + } +} + +impl ToTokens for MatchExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.match_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + self.brace_token.surround(tokens, |tokens| { + for arm in &self.arms { + arm.to_tokens(tokens); + } + }); + } +} + +#[derive(Debug, Clone)] +pub struct MatchArm { + pub pat: Pat, + pub guard: Option<(If, Expr)>, + pub fat_arrow_token: FatArrow, + pub body: Markup, + pub comma_token: Option, +} + +impl DiagnosticParse for MatchArm { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + Ok(Self { + pat: Pat::parse_multi_with_leading_vert(input)?, + guard: { + if input.peek(If) { + Some((input.parse()?, input.parse()?)) + } else { + None + } + }, + fat_arrow_token: input.parse()?, + body: Markup::diagnostic_parse_in_block(input, diagnostics)?, + comma_token: if input.peek(Comma) { + Some(input.parse()?) 
+ } else { + None + }, + }) + } +} + +impl ToTokens for MatchArm { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.pat.to_tokens(tokens); + if let Some((if_token, guard)) = &self.guard { + if_token.to_tokens(tokens); + guard.to_tokens(tokens); + } + self.fat_arrow_token.to_tokens(tokens); + self.body.to_tokens(tokens); + if let Some(comma_token) = &self.comma_token { + comma_token.to_tokens(tokens); + } + } +} + +pub trait DiagnosticParse: Sized { + fn diagnostic_parse(input: ParseStream, diagnostics: &mut Vec) + -> syn::Result; +} + +impl DiagnosticParse for Box { + fn diagnostic_parse( + input: ParseStream, + diagnostics: &mut Vec, + ) -> syn::Result { + Ok(Box::new(input.diagnostic_parse(diagnostics)?)) + } +} + +trait DiagonsticParseExt: Sized { + fn diagnostic_parse( + self, + diagnostics: &mut Vec, + ) -> syn::Result; +} + +impl DiagonsticParseExt for ParseStream<'_> { + fn diagnostic_parse(self, diagnostics: &mut Vec) -> syn::Result + where + T: DiagnosticParse, + { + T::diagnostic_parse(self, diagnostics) + } } diff --git a/helpers/pagetop-macros/src/maud/escape.rs b/helpers/pagetop-macros/src/maud/escape.rs index 49ece77..786d8c7 100644 --- a/helpers/pagetop-macros/src/maud/escape.rs +++ b/helpers/pagetop-macros/src/maud/escape.rs @@ -2,10 +2,6 @@ // !!!!!!!! PLEASE KEEP THIS IN SYNC WITH `maud/src/escape.rs` !!!!!!!!! // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -extern crate alloc; - -use alloc::string::String; - pub fn escape_to_string(input: &str, output: &mut String) { for b in input.bytes() { match b { @@ -20,10 +16,7 @@ pub fn escape_to_string(input: &str, output: &mut String) { #[cfg(test)] mod test { - extern crate alloc; - use super::escape_to_string; - use alloc::string::String; #[test] fn it_works() { diff --git a/helpers/pagetop-macros/src/maud/generate.rs b/helpers/pagetop-macros/src/maud/generate.rs index be7946d..1f82578 100644 --- a/helpers/pagetop-macros/src/maud/generate.rs +++ b/helpers/pagetop-macros/src/maud/generate.rs @@ -1,23 +1,23 @@ -use proc_macro2::{Delimiter, Group, Ident, Literal, Span, TokenStream, TokenTree}; -use proc_macro_error::SpanRange; -use quote::quote; +use proc_macro2::{Ident, Span, TokenStream}; +use quote::{quote, ToTokens}; +use syn::{parse_quote, token::Brace, Expr, Local}; use crate::maud::{ast::*, escape}; use proc_macro_crate::{crate_name, FoundCrate}; -pub fn generate(markups: Vec, output_ident: TokenTree) -> TokenStream { +pub fn generate(markups: Markups, output_ident: Ident) -> TokenStream { let mut build = Builder::new(output_ident.clone()); Generator::new(output_ident).markups(markups, &mut build); build.finish() } struct Generator { - output_ident: TokenTree, + output_ident: Ident, } impl Generator { - fn new(output_ident: TokenTree) -> Generator { + fn new(output_ident: Ident) -> Generator { Generator { output_ident } } @@ -25,257 +25,341 @@ impl Generator { Builder::new(self.output_ident.clone()) } - fn markups(&self, markups: Vec, build: &mut Builder) { - for markup in markups { + fn markups>(&self, markups: Markups, build: &mut Builder) { + for markup in markups.markups { self.markup(markup, build); } } - fn markup(&self, markup: Markup, build: &mut Builder) { + fn markup>(&self, markup: Markup, build: &mut Builder) { match markup { - Markup::ParseError { .. } => {} - Markup::Block(Block { - markups, - outer_span, - }) => { - if markups - .iter() - .any(|markup| matches!(*markup, Markup::Let { .. 
})) - { - self.block( - Block { - markups, - outer_span, - }, - build, - ); + Markup::Block(block) => { + if block.markups.markups.iter().any(|markup| { + matches!( + *markup, + Markup::ControlFlow(ControlFlow { + kind: ControlFlowKind::Let(_), + .. + }) + ) + }) { + self.block(block, build); } else { - self.markups(markups, build); + self.markups(block.markups, build); } } - Markup::Literal { content, .. } => build.push_escaped(&content), - Markup::Symbol { symbol } => self.name(symbol, build), + Markup::Lit(lit) => build.push_escaped(&lit.to_string()), Markup::Splice { expr, .. } => self.splice(expr, build), - Markup::Element { name, attrs, body } => self.element(name, attrs, body, build), - Markup::Let { tokens, .. } => build.push_tokens(tokens), - Markup::Special { segments } => { - for Special { head, body, .. } in segments { - build.push_tokens(head); - self.block(body, build); - } - } - Markup::Match { - head, - arms, - arms_span, - .. - } => { - let body = { - let mut build = self.builder(); - for MatchArm { head, body } in arms { - build.push_tokens(head); - self.block(body, &mut build); - } - build.finish() - }; - let mut body = TokenTree::Group(Group::new(Delimiter::Brace, body)); - body.set_span(arms_span.collapse()); - build.push_tokens(quote!(#head #body)); - } + Markup::Element(element) => self.element(element.into(), build), + Markup::ControlFlow(control_flow) => self.control_flow(control_flow, build), + Markup::Semi(_) => {} } } - fn block( - &self, - Block { - markups, - outer_span, - }: Block, - build: &mut Builder, - ) { - let block = { + fn block>(&self, block: Block, build: &mut Builder) { + let markups = { let mut build = self.builder(); - self.markups(markups, &mut build); + self.markups(block.markups, &mut build); build.finish() }; - let mut block = TokenTree::Group(Group::new(Delimiter::Brace, block)); - block.set_span(outer_span.collapse()); - build.push_tokens(TokenStream::from(block)); + + build.push_tokens(quote!({ #markups })); } - fn splice(&self, expr: TokenStream, build: &mut Builder) { - let output_ident = self.output_ident.clone(); + fn splice(&self, expr: Expr, build: &mut Builder) { + let output_ident = &self.output_ident; - let found_crate = crate_name("pagetop").expect("pagetop is present in `Cargo.toml`"); - build.push_tokens(match found_crate { - FoundCrate::Itself => quote!( - crate::html::html_private::render_to!(&#expr, &mut #output_ident); - ), - _ => quote!( - pagetop::html::html_private::render_to!(&#expr, &mut #output_ident); - ), + let found_crate = crate_name("pagetop").expect("pagetop debe existir en Cargo.toml"); + let crate_ident = match found_crate { + FoundCrate::Itself => Ident::new("pagetop", Span::call_site()), + FoundCrate::Name(name) => Ident::new(&name, Span::call_site()), + }; + build.push_tokens(quote! 
{ + #crate_ident::html::html_private::render_to!(&(#expr), &mut #output_ident); }); } - fn element(&self, name: TokenStream, attrs: Vec, body: ElementBody, build: &mut Builder) { + fn element(&self, element: Element, build: &mut Builder) { + let element_name = element.name.clone().unwrap_or_else(|| parse_quote!(div)); build.push_str("<"); - self.name(name.clone(), build); - self.attrs(attrs, build); + self.name(element_name.clone(), build); + self.attrs(element.attrs, build); build.push_str(">"); - if let ElementBody::Block { block } = body { + if let ElementBody::Block(block) = element.body { self.markups(block.markups, build); build.push_str(""); } } - fn name(&self, name: TokenStream, build: &mut Builder) { - build.push_escaped(&name_to_string(name)); + fn name(&self, name: HtmlName, build: &mut Builder) { + build.push_escaped(&name.to_string()); } - fn attrs(&self, attrs: Vec, build: &mut Builder) { - for NamedAttr { name, attr_type } in desugar_attrs(attrs) { - match attr_type { - AttrType::Normal { value } => { + fn name_or_markup(&self, name: HtmlNameOrMarkup, build: &mut Builder) { + match name { + HtmlNameOrMarkup::HtmlName(name) => self.name(name, build), + HtmlNameOrMarkup::Markup(markup) => self.markup(markup, build), + } + } + + fn attr(&self, name: HtmlName, value: AttributeType, build: &mut Builder) { + match value { + AttributeType::Normal { value, .. } => { + build.push_str(" "); + self.name(name, build); + build.push_str("=\""); + self.markup(value, build); + build.push_str("\""); + } + AttributeType::Optional { + toggler: Toggler { cond, .. }, + .. + } => { + let inner_value: Expr = parse_quote!(inner_value); + + let body = { + let mut build = self.builder(); build.push_str(" "); - self.name(name, build); + self.name(name, &mut build); build.push_str("=\""); - self.markup(value, build); + self.splice(inner_value.clone(), &mut build); build.push_str("\""); - } - AttrType::Optional { - toggler: Toggler { cond, .. }, - } => { - let inner_value = quote!(inner_value); - let body = { - let mut build = self.builder(); - build.push_str(" "); - self.name(name, &mut build); - build.push_str("=\""); - self.splice(inner_value.clone(), &mut build); - build.push_str("\""); - build.finish() - }; - build.push_tokens(quote!(if let Some(#inner_value) = (#cond) { #body })); - } - AttrType::Empty { toggler: None } => { + build.finish() + }; + build.push_tokens(quote!(if let Some(#inner_value) = (#cond) { #body })); + } + AttributeType::Empty(None) => { + build.push_str(" "); + self.name(name, build); + } + AttributeType::Empty(Some(Toggler { cond, .. })) => { + let body = { + let mut build = self.builder(); build.push_str(" "); - self.name(name, build); - } - AttrType::Empty { - toggler: Some(Toggler { cond, .. 
}), - } => { - let body = { - let mut build = self.builder(); - build.push_str(" "); - self.name(name, &mut build); - build.finish() - }; - build.push_tokens(quote!(if (#cond) { #body })); - } + self.name(name, &mut build); + build.finish() + }; + build.push_tokens(quote!(if (#cond) { #body })); } } } + + fn attrs(&self, attrs: Vec, build: &mut Builder) { + let (classes, id, named_attrs) = split_attrs(attrs); + + if !classes.is_empty() { + let mut toggle_class_exprs = vec![]; + + build.push_str(" "); + self.name(parse_quote!(class), build); + build.push_str("=\""); + for (i, (name, toggler)) in classes.into_iter().enumerate() { + if let Some(toggler) = toggler { + toggle_class_exprs.push((i > 0, name, toggler)); + } else { + if i > 0 { + build.push_str(" "); + } + self.name_or_markup(name, build); + } + } + + for (not_first, name, toggler) in toggle_class_exprs { + let body = { + let mut build = self.builder(); + if not_first { + build.push_str(" "); + } + self.name_or_markup(name, &mut build); + build.finish() + }; + build.push_tokens(quote!(if (#toggler) { #body })); + } + + build.push_str("\""); + } + + if let Some(id) = id { + build.push_str(" "); + self.name(parse_quote!(id), build); + build.push_str("=\""); + self.name_or_markup(id, build); + build.push_str("\""); + } + + for (name, attr_type) in named_attrs { + self.attr(name, attr_type, build); + } + } + + fn control_flow>(&self, control_flow: ControlFlow, build: &mut Builder) { + match control_flow.kind { + ControlFlowKind::If(if_) => self.control_flow_if(if_, build), + ControlFlowKind::Let(let_) => self.control_flow_let(let_, build), + ControlFlowKind::For(for_) => self.control_flow_for(for_, build), + ControlFlowKind::While(while_) => self.control_flow_while(while_, build), + ControlFlowKind::Match(match_) => self.control_flow_match(match_, build), + } + } + + fn control_flow_if>( + &self, + IfExpr { + if_token, + cond, + then_branch, + else_branch, + }: IfExpr, + build: &mut Builder, + ) { + build.push_tokens(quote!(#if_token #cond)); + self.block(then_branch, build); + + if let Some((_, else_token, if_or_block)) = else_branch { + build.push_tokens(quote!(#else_token)); + self.control_flow_if_or_block(*if_or_block, build); + } + } + + fn control_flow_if_or_block>( + &self, + if_or_block: IfOrBlock, + build: &mut Builder, + ) { + match if_or_block { + IfOrBlock::If(if_) => self.control_flow_if(if_, build), + IfOrBlock::Block(block) => self.block(block, build), + } + } + + fn control_flow_let(&self, let_: Local, build: &mut Builder) { + build.push_tokens(let_.to_token_stream()); + } + + fn control_flow_for>( + &self, + ForExpr { + for_token, + pat, + in_token, + expr, + body, + }: ForExpr, + build: &mut Builder, + ) { + build.push_tokens(quote!(#for_token #pat #in_token (#expr))); + self.block(body, build); + } + + fn control_flow_while>( + &self, + WhileExpr { + while_token, + cond, + body, + }: WhileExpr, + build: &mut Builder, + ) { + build.push_tokens(quote!(#while_token #cond)); + self.block(body, build); + } + + fn control_flow_match>( + &self, + MatchExpr { + match_token, + expr, + brace_token, + arms, + }: MatchExpr, + build: &mut Builder, + ) { + let arms = { + let mut build = self.builder(); + for MatchArm { + pat, + guard, + fat_arrow_token, + body, + comma_token, + } in arms + { + build.push_tokens(quote!(#pat)); + if let Some((if_token, cond)) = guard { + build.push_tokens(quote!(#if_token #cond)); + } + build.push_tokens(quote!(#fat_arrow_token)); + self.block( + Block { + brace_token: Brace(Span::call_site()), + 
markups: Markups { + markups: vec![body], + }, + }, + &mut build, + ); + build.push_tokens(quote!(#comma_token)); + } + build.finish() + }; + + let mut arm_block = TokenStream::new(); + + brace_token.surround(&mut arm_block, |tokens| { + arms.to_tokens(tokens); + }); + + build.push_tokens(quote!(#match_token #expr #arm_block)); + } } //////////////////////////////////////////////////////// -fn desugar_attrs(attrs: Vec) -> Vec { - let mut classes_static = vec![]; - let mut classes_toggled = vec![]; - let mut ids = vec![]; +#[allow(clippy::type_complexity)] +fn split_attrs( + attrs: Vec, +) -> ( + Vec<(HtmlNameOrMarkup, Option)>, + Option, + Vec<(HtmlName, AttributeType)>, +) { + let mut classes = vec![]; + let mut id = None; let mut named_attrs = vec![]; + for attr in attrs { match attr { - Attr::Class { - name, - toggler: Some(toggler), - .. - } => classes_toggled.push((name, toggler)), - Attr::Class { - name, - toggler: None, - .. - } => classes_static.push(name), - Attr::Id { name, .. } => ids.push(name), - Attr::Named { named_attr } => named_attrs.push(named_attr), + Attribute::Class { name, toggler, .. } => { + classes.push((name, toggler.map(|toggler| toggler.cond))) + } + Attribute::Id { name, .. } => id = Some(name), + Attribute::Named { name, attr_type } => named_attrs.push((name, attr_type)), } } - let classes = desugar_classes_or_ids("class", classes_static, classes_toggled); - let ids = desugar_classes_or_ids("id", ids, vec![]); - classes.into_iter().chain(ids).chain(named_attrs).collect() -} -fn desugar_classes_or_ids( - attr_name: &'static str, - values_static: Vec, - values_toggled: Vec<(Markup, Toggler)>, -) -> Option { - if values_static.is_empty() && values_toggled.is_empty() { - return None; - } - let mut markups = Vec::new(); - let mut leading_space = false; - for name in values_static { - markups.extend(prepend_leading_space(name, &mut leading_space)); - } - for (name, Toggler { cond, cond_span }) in values_toggled { - let body = Block { - markups: prepend_leading_space(name, &mut leading_space), - // TODO: is this correct? 
- outer_span: cond_span, - }; - markups.push(Markup::Special { - segments: vec![Special { - at_span: SpanRange::call_site(), - head: quote!(if (#cond)), - body, - }], - }); - } - Some(NamedAttr { - name: TokenStream::from(TokenTree::Ident(Ident::new(attr_name, Span::call_site()))), - attr_type: AttrType::Normal { - value: Markup::Block(Block { - markups, - outer_span: SpanRange::call_site(), - }), - }, - }) -} - -fn prepend_leading_space(name: Markup, leading_space: &mut bool) -> Vec { - let mut markups = Vec::new(); - if *leading_space { - markups.push(Markup::Literal { - content: " ".to_owned(), - span: name.span(), - }); - } - *leading_space = true; - markups.push(name); - markups + (classes, id, named_attrs) } //////////////////////////////////////////////////////// struct Builder { - output_ident: TokenTree, - tokens: Vec, + output_ident: Ident, + tokens: TokenStream, tail: String, } impl Builder { - fn new(output_ident: TokenTree) -> Builder { + fn new(output_ident: Ident) -> Builder { Builder { output_ident, - tokens: Vec::new(), + tokens: TokenStream::new(), tail: String::new(), } } - fn push_str(&mut self, string: &str) { + fn push_str(&mut self, string: &'static str) { self.tail.push_str(string); } @@ -294,8 +378,8 @@ impl Builder { } let push_str_expr = { let output_ident = self.output_ident.clone(); - let string = TokenTree::Literal(Literal::string(&self.tail)); - quote!(#output_ident.push_str(#string);) + let tail = &self.tail; + quote!(#output_ident.push_str(#tail);) }; self.tail.clear(); self.tokens.extend(push_str_expr); @@ -303,6 +387,6 @@ impl Builder { fn finish(mut self) -> TokenStream { self.cut(); - self.tokens.into_iter().collect() + self.tokens } } diff --git a/helpers/pagetop-macros/src/maud/parse.rs b/helpers/pagetop-macros/src/maud/parse.rs deleted file mode 100644 index d24cea6..0000000 --- a/helpers/pagetop-macros/src/maud/parse.rs +++ /dev/null @@ -1,752 +0,0 @@ -use proc_macro2::{Delimiter, Ident, Literal, Spacing, Span, TokenStream, TokenTree}; -use proc_macro_error::{abort, abort_call_site, emit_error, SpanRange}; -use std::collections::HashMap; - -use syn::Lit; - -use crate::maud::ast; - -pub fn parse(input: TokenStream) -> Vec { - Parser::new(input).markups() -} - -#[derive(Clone)] -struct Parser { - /// If we're inside an attribute, then this contains the attribute name. - current_attr: Option, - input: ::IntoIter, -} - -impl Iterator for Parser { - type Item = TokenTree; - - fn next(&mut self) -> Option { - self.input.next() - } -} - -impl Parser { - fn new(input: TokenStream) -> Parser { - Parser { - current_attr: None, - input: input.into_iter(), - } - } - - fn with_input(&self, input: TokenStream) -> Parser { - Parser { - current_attr: self.current_attr.clone(), - input: input.into_iter(), - } - } - - /// Returns the next token in the stream without consuming it. - fn peek(&mut self) -> Option { - self.clone().next() - } - - /// Returns the next two tokens in the stream without consuming them. - fn peek2(&mut self) -> Option<(TokenTree, Option)> { - let mut clone = self.clone(); - clone.next().map(|first| (first, clone.next())) - } - - /// Advances the cursor by one step. - fn advance(&mut self) { - self.next(); - } - - /// Advances the cursor by two steps. - fn advance2(&mut self) { - self.next(); - self.next(); - } - - /// Parses multiple blocks of markup. 
- fn markups(&mut self) -> Vec { - let mut result = Vec::new(); - loop { - match self.peek2() { - None => break, - Some((TokenTree::Punct(ref punct), _)) if punct.as_char() == ';' => self.advance(), - Some((TokenTree::Punct(ref punct), Some(TokenTree::Ident(ref ident)))) - if punct.as_char() == '@' && *ident == "let" => - { - self.advance2(); - let keyword = TokenTree::Ident(ident.clone()); - result.push(self.let_expr(punct.span(), keyword)); - } - _ => result.push(self.markup()), - } - } - result - } - - /// Parses a single block of markup. - fn markup(&mut self) -> ast::Markup { - let token = match self.peek() { - Some(token) => token, - None => { - abort_call_site!("unexpected end of input"); - } - }; - let markup = match token { - // Literal - TokenTree::Literal(literal) => { - self.advance(); - self.literal(literal) - } - // Special form - TokenTree::Punct(ref punct) if punct.as_char() == '@' => { - self.advance(); - let at_span = punct.span(); - match self.next() { - Some(TokenTree::Ident(ident)) => { - let keyword = TokenTree::Ident(ident.clone()); - match ident.to_string().as_str() { - "if" => { - let mut segments = Vec::new(); - self.if_expr(at_span, vec![keyword], &mut segments); - ast::Markup::Special { segments } - } - "while" => self.while_expr(at_span, keyword), - "for" => self.for_expr(at_span, keyword), - "match" => self.match_expr(at_span, keyword), - "let" => { - let span = SpanRange { - first: at_span, - last: ident.span(), - }; - abort!(span, "`@let` only works inside a block"); - } - other => { - let span = SpanRange { - first: at_span, - last: ident.span(), - }; - abort!(span, "unknown keyword `@{}`", other); - } - } - } - _ => { - abort!(at_span, "expected keyword after `@`"); - } - } - } - // Element - TokenTree::Ident(ident) => { - let ident_string = ident.to_string(); - match ident_string.as_str() { - "if" | "while" | "for" | "match" | "let" => { - abort!( - ident, - "found keyword `{}`", ident_string; - help = "should this be a `@{}`?", ident_string - ); - } - "true" | "false" => { - if let Some(attr_name) = &self.current_attr { - emit_error!( - ident, - r#"attribute value must be a string"#; - help = "to declare an empty attribute, omit the equals sign: `{}`", - attr_name; - help = "to toggle the attribute, use square brackets: `{}[some_boolean_flag]`", - attr_name; - ); - return ast::Markup::ParseError { - span: SpanRange::single_span(ident.span()), - }; - } - } - _ => {} - } - - // `.try_namespaced_name()` should never fail as we've - // already seen an `Ident` - let name = self.try_namespaced_name().expect("identifier"); - self.element(name) - } - // Div element shorthand - TokenTree::Punct(ref punct) if punct.as_char() == '.' || punct.as_char() == '#' => { - let name = TokenTree::Ident(Ident::new("div", punct.span())); - self.element(name.into()) - } - // Splice - TokenTree::Group(ref group) if group.delimiter() == Delimiter::Parenthesis => { - self.advance(); - ast::Markup::Splice { - expr: group.stream(), - outer_span: SpanRange::single_span(group.span()), - } - } - // Block - TokenTree::Group(ref group) if group.delimiter() == Delimiter::Brace => { - self.advance(); - ast::Markup::Block(self.block(group.stream(), SpanRange::single_span(group.span()))) - } - // ??? - token => { - abort!(token, "invalid syntax"); - } - }; - markup - } - - /// Parses a literal string. 
- fn literal(&mut self, literal: Literal) -> ast::Markup { - match Lit::new(literal.clone()) { - Lit::Str(lit_str) => { - return ast::Markup::Literal { - content: lit_str.value(), - span: SpanRange::single_span(literal.span()), - } - } - // Boolean literals are idents, so `Lit::Bool` is handled in - // `markup`, not here. - Lit::Int(..) | Lit::Float(..) => { - emit_error!(literal, r#"literal must be double-quoted: `"{}"`"#, literal); - } - Lit::Char(lit_char) => { - emit_error!( - literal, - r#"literal must be double-quoted: `"{}"`"#, - lit_char.value(), - ); - } - _ => { - emit_error!(literal, "expected string"); - } - } - ast::Markup::ParseError { - span: SpanRange::single_span(literal.span()), - } - } - - /// Parses an `@if` expression. - /// - /// The leading `@if` should already be consumed. - fn if_expr(&mut self, at_span: Span, prefix: Vec, segments: &mut Vec) { - let mut head = prefix; - let body = loop { - match self.next() { - Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => { - break self.block(block.stream(), SpanRange::single_span(block.span())); - } - Some(token) => head.push(token), - None => { - let mut span = ast::span_tokens(head); - span.first = at_span; - abort!(span, "expected body for this `@if`"); - } - } - }; - segments.push(ast::Special { - at_span: SpanRange::single_span(at_span), - head: head.into_iter().collect(), - body, - }); - self.else_if_expr(segments) - } - - /// Parses an optional `@else if` or `@else`. - /// - /// The leading `@else if` or `@else` should *not* already be consumed. - fn else_if_expr(&mut self, segments: &mut Vec) { - match self.peek2() { - Some((TokenTree::Punct(ref punct), Some(TokenTree::Ident(ref else_keyword)))) - if punct.as_char() == '@' && *else_keyword == "else" => - { - self.advance2(); - let at_span = punct.span(); - let else_keyword = TokenTree::Ident(else_keyword.clone()); - match self.peek() { - // `@else if` - Some(TokenTree::Ident(ref if_keyword)) if *if_keyword == "if" => { - self.advance(); - let if_keyword = TokenTree::Ident(if_keyword.clone()); - self.if_expr(at_span, vec![else_keyword, if_keyword], segments) - } - // Just an `@else` - _ => match self.next() { - Some(TokenTree::Group(ref group)) - if group.delimiter() == Delimiter::Brace => - { - let body = - self.block(group.stream(), SpanRange::single_span(group.span())); - segments.push(ast::Special { - at_span: SpanRange::single_span(at_span), - head: vec![else_keyword].into_iter().collect(), - body, - }); - } - _ => { - let span = SpanRange { - first: at_span, - last: else_keyword.span(), - }; - abort!(span, "expected body for this `@else`"); - } - }, - } - } - // We didn't find an `@else`; stop - _ => {} - } - } - - /// Parses an `@while` expression. - /// - /// The leading `@while` should already be consumed. - fn while_expr(&mut self, at_span: Span, keyword: TokenTree) -> ast::Markup { - let keyword_span = keyword.span(); - let mut head = vec![keyword]; - let body = loop { - match self.next() { - Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => { - break self.block(block.stream(), SpanRange::single_span(block.span())); - } - Some(token) => head.push(token), - None => { - let span = SpanRange { - first: at_span, - last: keyword_span, - }; - abort!(span, "expected body for this `@while`"); - } - } - }; - ast::Markup::Special { - segments: vec![ast::Special { - at_span: SpanRange::single_span(at_span), - head: head.into_iter().collect(), - body, - }], - } - } - - /// Parses a `@for` expression. 
- /// - /// The leading `@for` should already be consumed. - fn for_expr(&mut self, at_span: Span, keyword: TokenTree) -> ast::Markup { - let keyword_span = keyword.span(); - let mut head = vec![keyword]; - loop { - match self.next() { - Some(TokenTree::Ident(ref in_keyword)) if *in_keyword == "in" => { - head.push(TokenTree::Ident(in_keyword.clone())); - break; - } - Some(token) => head.push(token), - None => { - let span = SpanRange { - first: at_span, - last: keyword_span, - }; - abort!(span, "missing `in` in `@for` loop"); - } - } - } - let body = loop { - match self.next() { - Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => { - break self.block(block.stream(), SpanRange::single_span(block.span())); - } - Some(token) => head.push(token), - None => { - let span = SpanRange { - first: at_span, - last: keyword_span, - }; - abort!(span, "expected body for this `@for`"); - } - } - }; - ast::Markup::Special { - segments: vec![ast::Special { - at_span: SpanRange::single_span(at_span), - head: head.into_iter().collect(), - body, - }], - } - } - - /// Parses a `@match` expression. - /// - /// The leading `@match` should already be consumed. - fn match_expr(&mut self, at_span: Span, keyword: TokenTree) -> ast::Markup { - let keyword_span = keyword.span(); - let mut head = vec![keyword]; - let (arms, arms_span) = loop { - match self.next() { - Some(TokenTree::Group(ref body)) if body.delimiter() == Delimiter::Brace => { - let span = SpanRange::single_span(body.span()); - break (self.with_input(body.stream()).match_arms(), span); - } - Some(token) => head.push(token), - None => { - let span = SpanRange { - first: at_span, - last: keyword_span, - }; - abort!(span, "expected body for this `@match`"); - } - } - }; - ast::Markup::Match { - at_span: SpanRange::single_span(at_span), - head: head.into_iter().collect(), - arms, - arms_span, - } - } - - fn match_arms(&mut self) -> Vec { - let mut arms = Vec::new(); - while let Some(arm) = self.match_arm() { - arms.push(arm); - } - arms - } - - fn match_arm(&mut self) -> Option { - let mut head = Vec::new(); - loop { - match self.peek2() { - Some((TokenTree::Punct(ref eq), Some(TokenTree::Punct(ref gt)))) - if eq.as_char() == '=' - && gt.as_char() == '>' - && eq.spacing() == Spacing::Joint => - { - self.advance2(); - head.push(TokenTree::Punct(eq.clone())); - head.push(TokenTree::Punct(gt.clone())); - break; - } - Some((token, _)) => { - self.advance(); - head.push(token); - } - None => { - if head.is_empty() { - return None; - } else { - let head_span = ast::span_tokens(head); - abort!(head_span, "unexpected end of @match pattern"); - } - } - } - } - let body = match self.next() { - // $pat => { $stmts } - Some(TokenTree::Group(ref body)) if body.delimiter() == Delimiter::Brace => { - let body = self.block(body.stream(), SpanRange::single_span(body.span())); - // Trailing commas are optional if the match arm is a braced block - if let Some(TokenTree::Punct(ref punct)) = self.peek() { - if punct.as_char() == ',' { - self.advance(); - } - } - body - } - // $pat => $expr - Some(first_token) => { - let mut span = SpanRange::single_span(first_token.span()); - let mut body = vec![first_token]; - loop { - match self.next() { - Some(TokenTree::Punct(ref punct)) if punct.as_char() == ',' => break, - Some(token) => { - span.last = token.span(); - body.push(token); - } - None => break, - } - } - self.block(body.into_iter().collect(), span) - } - None => { - let span = ast::span_tokens(head); - abort!(span, "unexpected end of @match 
arm"); - } - }; - Some(ast::MatchArm { - head: head.into_iter().collect(), - body, - }) - } - - /// Parses a `@let` expression. - /// - /// The leading `@let` should already be consumed. - fn let_expr(&mut self, at_span: Span, keyword: TokenTree) -> ast::Markup { - let mut tokens = vec![keyword]; - loop { - match self.next() { - Some(token) => match token { - TokenTree::Punct(ref punct) if punct.as_char() == '=' => { - tokens.push(token.clone()); - break; - } - _ => tokens.push(token), - }, - None => { - let mut span = ast::span_tokens(tokens); - span.first = at_span; - abort!(span, "unexpected end of `@let` expression"); - } - } - } - loop { - match self.next() { - Some(token) => match token { - TokenTree::Punct(ref punct) if punct.as_char() == ';' => { - tokens.push(token.clone()); - break; - } - _ => tokens.push(token), - }, - None => { - let mut span = ast::span_tokens(tokens); - span.first = at_span; - abort!( - span, - "unexpected end of `@let` expression"; - help = "are you missing a semicolon?" - ); - } - } - } - ast::Markup::Let { - at_span: SpanRange::single_span(at_span), - tokens: tokens.into_iter().collect(), - } - } - - /// Parses an element node. - /// - /// The element name should already be consumed. - fn element(&mut self, name: TokenStream) -> ast::Markup { - if self.current_attr.is_some() { - let span = ast::span_tokens(name); - abort!(span, "unexpected element"); - } - let attrs = self.attrs(); - let body = match self.peek() { - Some(TokenTree::Punct(ref punct)) - if punct.as_char() == ';' || punct.as_char() == '/' => - { - // Void element - self.advance(); - if punct.as_char() == '/' { - emit_error!( - punct, - "void elements must use `;`, not `/`"; - help = "change this to `;`"; - help = "see https://github.com/lambda-fairy/maud/pull/315 for details"; - ); - } - ast::ElementBody::Void { - semi_span: SpanRange::single_span(punct.span()), - } - } - Some(_) => match self.markup() { - ast::Markup::Block(block) => ast::ElementBody::Block { block }, - markup => { - let markup_span = markup.span(); - abort!( - markup_span, - "element body must be wrapped in braces"; - help = "see https://github.com/lambda-fairy/maud/pull/137 for details" - ); - } - }, - None => abort_call_site!("expected `;`, found end of macro"), - }; - ast::Markup::Element { name, attrs, body } - } - - /// Parses the attributes of an element. - fn attrs(&mut self) -> Vec { - let mut attrs = Vec::new(); - loop { - if let Some(name) = self.try_namespaced_name() { - // Attribute - match self.peek() { - // Non-empty attribute - Some(TokenTree::Punct(ref punct)) if punct.as_char() == '=' => { - self.advance(); - // Parse a value under an attribute context - assert!(self.current_attr.is_none()); - self.current_attr = Some(ast::name_to_string(name.clone())); - let attr_type = match self.attr_toggler() { - Some(toggler) => ast::AttrType::Optional { toggler }, - None => { - let value = self.markup(); - ast::AttrType::Normal { value } - } - }; - self.current_attr = None; - attrs.push(ast::Attr::Named { - named_attr: ast::NamedAttr { name, attr_type }, - }); - } - // Empty attribute (legacy syntax) - Some(TokenTree::Punct(ref punct)) if punct.as_char() == '?' 
=> { - self.advance(); - let toggler = self.attr_toggler(); - attrs.push(ast::Attr::Named { - named_attr: ast::NamedAttr { - name: name.clone(), - attr_type: ast::AttrType::Empty { toggler }, - }, - }); - } - // Empty attribute (new syntax) - _ => { - let toggler = self.attr_toggler(); - attrs.push(ast::Attr::Named { - named_attr: ast::NamedAttr { - name: name.clone(), - attr_type: ast::AttrType::Empty { toggler }, - }, - }); - } - } - } else { - match self.peek() { - // Class shorthand - Some(TokenTree::Punct(ref punct)) if punct.as_char() == '.' => { - self.advance(); - let name = self.class_or_id_name(); - let toggler = self.attr_toggler(); - attrs.push(ast::Attr::Class { - dot_span: SpanRange::single_span(punct.span()), - name, - toggler, - }); - } - // ID shorthand - Some(TokenTree::Punct(ref punct)) if punct.as_char() == '#' => { - self.advance(); - let name = self.class_or_id_name(); - attrs.push(ast::Attr::Id { - hash_span: SpanRange::single_span(punct.span()), - name, - }); - } - // If it's not a valid attribute, backtrack and bail out - _ => break, - } - } - } - - let mut attr_map: HashMap> = HashMap::new(); - let mut has_class = false; - for attr in &attrs { - let name = match attr { - ast::Attr::Class { .. } => { - if has_class { - // Only check the first class to avoid spurious duplicates - continue; - } - has_class = true; - "class".to_string() - } - ast::Attr::Id { .. } => "id".to_string(), - ast::Attr::Named { named_attr } => named_attr - .name - .clone() - .into_iter() - .map(|token| token.to_string()) - .collect(), - }; - let entry = attr_map.entry(name).or_default(); - entry.push(attr.span()); - } - - for (name, spans) in attr_map { - if spans.len() > 1 { - let mut spans = spans.into_iter(); - let first_span = spans.next().expect("spans should be non-empty"); - abort!(first_span, "duplicate attribute `{}`", name); - } - } - - attrs - } - - /// Parses the name of a class or ID. - fn class_or_id_name(&mut self) -> ast::Markup { - if let Some(symbol) = self.try_name() { - ast::Markup::Symbol { symbol } - } else { - self.markup() - } - } - - /// Parses the `[cond]` syntax after an empty attribute or class shorthand. - fn attr_toggler(&mut self) -> Option { - match self.peek() { - Some(TokenTree::Group(ref group)) if group.delimiter() == Delimiter::Bracket => { - self.advance(); - Some(ast::Toggler { - cond: group.stream(), - cond_span: SpanRange::single_span(group.span()), - }) - } - _ => None, - } - } - - /// Parses an identifier, without dealing with namespaces. - fn try_name(&mut self) -> Option { - let mut result = Vec::new(); - if let Some(token @ TokenTree::Ident(_)) = self.peek() { - self.advance(); - result.push(token); - } else { - return None; - } - let mut expect_ident = false; - loop { - expect_ident = match self.peek() { - Some(TokenTree::Punct(ref punct)) if punct.as_char() == '-' => { - self.advance(); - result.push(TokenTree::Punct(punct.clone())); - true - } - Some(TokenTree::Ident(ref ident)) if expect_ident => { - self.advance(); - result.push(TokenTree::Ident(ident.clone())); - false - } - _ => break, - }; - } - Some(result.into_iter().collect()) - } - - /// Parses a HTML element or attribute name, along with a namespace - /// if necessary. 
-    fn try_namespaced_name(&mut self) -> Option<TokenStream> {
-        let mut result = vec![self.try_name()?];
-        if let Some(TokenTree::Punct(ref punct)) = self.peek() {
-            if punct.as_char() == ':' {
-                self.advance();
-                result.push(TokenStream::from(TokenTree::Punct(punct.clone())));
-                result.push(self.try_name()?);
-            }
-        }
-        Some(result.into_iter().collect())
-    }
-
-    /// Parses the given token stream as a Maud expression.
-    fn block(&mut self, body: TokenStream, outer_span: SpanRange) -> ast::Block {
-        let markups = self.with_input(body).markups();
-        ast::Block {
-            markups,
-            outer_span,
-        }
-    }
-}
diff --git a/src/html.rs b/src/html.rs
index 1f24f63..14e72b9 100644
--- a/src/html.rs
+++ b/src/html.rs
@@ -1,4 +1,4 @@
 //! HTML en código.
 
 mod maud;
-pub use maud::{html, html_private, Markup, PreEscaped, DOCTYPE};
+pub use maud::{display, html, html_private, Escaper, Markup, PreEscaped, Render, DOCTYPE};
diff --git a/src/html/maud.rs b/src/html/maud.rs
index db9308a..1942986 100644
--- a/src/html/maud.rs
+++ b/src/html/maud.rs
@@ -1,4 +1,4 @@
-//#![no_std]
+// #![no_std]
 
 //! A macro for writing HTML templates.
 //!
@@ -7,11 +7,11 @@
 //!
 //! [book]: https://maud.lambda.xyz/
 
-//#![doc(html_root_url = "https://docs.rs/maud/0.25.0")]
+// #![doc(html_root_url = "https://docs.rs/maud/0.27.0")]
 
 extern crate alloc;
 
-use alloc::{borrow::Cow, boxed::Box, string::String};
+use alloc::{borrow::Cow, boxed::Box, string::String, sync::Arc};
 use core::fmt::{self, Arguments, Display, Write};
 
 pub use pagetop_macros::html;
@@ -34,8 +34,8 @@ mod escape;
 ///
 /// # Example
 ///
-/// ```rust#ignore
-/// use maud::Escaper;
+/// ```rust
+/// use pagetop::html::Escaper;
 /// use std::fmt::Write;
 /// let mut s = String::new();
 /// write!(Escaper::new(&mut s), "<script>launchMissiles()</script>").unwrap();
@@ -50,7 +50,7 @@ impl<'a> Escaper<'a> {
     }
 }
 
-impl<'a> fmt::Write for Escaper<'a> {
+impl fmt::Write for Escaper<'_> {
     fn write_str(&mut self, s: &str) -> fmt::Result {
         escape::escape_to_string(s, self.0);
         Ok(())
@@ -72,8 +72,8 @@ impl<'a> fmt::Write for Escaper<'a> {
 ///
 /// # Example
 ///
-/// ```rust#ignore
-/// use maud::{html, Markup, Render};
+/// ```rust
+/// use pagetop::html::{html, Markup, Render};
 ///
 /// /// Provides a shorthand for linking to a CSS stylesheet.
 /// pub struct Stylesheet(&'static str);
@@ -120,25 +120,25 @@ impl Render for String {
     }
 }
 
-impl<'a> Render for Cow<'a, str> {
+impl Render for Cow<'_, str> {
     fn render_to(&self, w: &mut String) {
         str::render_to(self, w);
     }
 }
 
-impl<'a> Render for Arguments<'a> {
+impl Render for Arguments<'_> {
     fn render_to(&self, w: &mut String) {
         let _ = Escaper::new(w).write_fmt(*self);
     }
 }
 
-impl<'a, T: Render + ?Sized> Render for &'a T {
+impl<T: Render + ?Sized> Render for &T {
     fn render_to(&self, w: &mut String) {
         T::render_to(self, w);
     }
 }
 
-impl<'a, T: Render + ?Sized> Render for &'a mut T {
+impl<T: Render + ?Sized> Render for &mut T {
     fn render_to(&self, w: &mut String) {
         T::render_to(self, w);
     }
@@ -150,6 +150,12 @@ impl<T: Render + ?Sized> Render for Box<T> {
     }
 }
 
+impl<T: Render + ?Sized> Render for Arc<T> {
+    fn render_to(&self, w: &mut String) {
+        T::render_to(self, w);
+    }
+}
+
 macro_rules! impl_render_with_display {
     ($($ty:ty)*) => {
         $(
@@ -188,15 +194,15 @@ impl_render_with_itoa! {
 ///
 /// # Example
 ///
-/// ```rust#ignore
-/// use maud::html;
+/// ```rust
+/// use pagetop::html::{display, html};
 /// use std::net::Ipv4Addr;
 ///
 /// let ip_address = Ipv4Addr::new(127, 0, 0, 1);
 ///
 /// let markup = html! {
 ///     "My IP address is: "
-///     (maud::display(ip_address))
+///     (display(ip_address))
 /// };
 ///
 /// assert_eq!(markup.into_string(), "My IP address is: 127.0.0.1");
@@ -215,7 +221,7 @@ pub fn display(value: impl Display) -> impl Render {
 
 /// A wrapper that renders the inner value without escaping.
 #[derive(Debug, Clone, Copy)]
-pub struct PreEscaped<T: AsRef<str>>(pub T);
+pub struct PreEscaped<T>(pub T);
 
 impl<T: AsRef<str>> Render for PreEscaped<T> {
     fn render_to(&self, w: &mut String) {
@@ -234,20 +240,20 @@ impl Markup {
     }
 }
 
-impl<T: AsRef<str> + Into<String>> PreEscaped<T> {
+impl<T: Into<String>> PreEscaped<T> {
     /// Converts the inner value to a string.
     pub fn into_string(self) -> String {
         self.0.into()
     }
 }
 
-impl<T: AsRef<str> + Into<String>> From<PreEscaped<T>> for String {
+impl<T: Into<String>> From<PreEscaped<T>> for String {
     fn from(value: PreEscaped<T>) -> String {
         value.into_string()
     }
 }
 
-impl<T: AsRef<str> + Default> Default for PreEscaped<T> {
+impl<T: Default> Default for PreEscaped<T> {
     fn default() -> Self {
         Self(Default::default())
     }
@@ -259,8 +265,8 @@ impl<T: AsRef<str> + Default> Default for PreEscaped<T> {
 ///
 /// A minimal web page:
 ///
-/// ```rust#ignore
-/// use maud::{DOCTYPE, html};
+/// ```rust
+/// use pagetop::html::{DOCTYPE, html};
 ///
 /// let markup = html! {
 ///     (DOCTYPE)
@@ -280,10 +286,35 @@ pub const DOCTYPE: PreEscaped<&'static str> = PreEscaped("<!DOCTYPE html>");
 mod actix_support {
     extern crate alloc;
 
+    use core::{
+        pin::Pin,
+        task::{Context, Poll},
+    };
+
     use crate::html::PreEscaped;
-    use actix_web::{http::header, HttpRequest, HttpResponse, Responder};
+    use actix_web::{
+        body::{BodySize, MessageBody},
+        http::header,
+        web::Bytes,
+        HttpRequest, HttpResponse, Responder,
+    };
     use alloc::string::String;
 
+    impl MessageBody for PreEscaped<String> {
+        type Error = <String as MessageBody>::Error;
+
+        fn size(&self) -> BodySize {
+            self.0.size()
+        }
+
+        fn poll_next(
+            mut self: Pin<&mut Self>,
+            cx: &mut Context<'_>,
+        ) -> Poll<Option<Result<Bytes, Self::Error>>> {
+            Pin::new(&mut self.0).poll_next(cx)
+        }
+    }
+
     impl Responder for PreEscaped<String> {
         type Body = String;
 
diff --git a/src/service.rs b/src/service.rs
index 9fe2450..e6904b8 100644
--- a/src/service.rs
+++ b/src/service.rs
@@ -1,4 +1,4 @@
-//! Gestión del servidor y servicios web ([actix-web](https://docs.rs/actix-web)).
+//! Gestión del servidor y servicios web ([Actix Web](https://docs.rs/actix-web)).
 
 pub use actix_web::body::BoxBody;
 pub use actix_web::dev::Server;
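
Editor's note (not part of the patch): the sketch below shows how the items re-exported from pagetop::html in this update are intended to be used. It is an assumption-labelled example, not code from the repository: the function names, the standalone handler and the assertions are hypothetical; only the html!/display/PreEscaped/DOCTYPE usage mirrors the doc examples patched in src/html/maud.rs, and returning Markup from a handler relies on the Responder and new MessageBody impls for PreEscaped<String> shown above.

use pagetop::html::{display, html, Markup, PreEscaped, DOCTYPE};

// Hypothetical page builder; mirrors the `display(ip_address)` doc example above.
fn demo_page(ip: std::net::Ipv4Addr) -> Markup {
    html! {
        (DOCTYPE)
        html {
            head { title { "Demo" } }
            body {
                p { "My IP address is: " (display(ip)) }
                // `PreEscaped` skips HTML escaping, as documented above.
                (PreEscaped("<hr>".to_string()))
            }
        }
    }
}

// Hypothetical handler: the `Responder` and new `MessageBody` impls for
// `PreEscaped<String>` let a `Markup` value be returned directly to Actix Web.
#[allow(dead_code)]
async fn demo_handler() -> Markup {
    demo_page(std::net::Ipv4Addr::LOCALHOST)
}

fn main() {
    let page = demo_page(std::net::Ipv4Addr::new(127, 0, 0, 1)).into_string();
    assert!(page.starts_with("<!DOCTYPE html>"));
    assert!(page.contains("My IP address is: 127.0.0.1"));
}

Because Markup is just PreEscaped<String>, the same value can be checked as a plain string (as in main above) or handed to Actix Web without any extra conversion.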