From 064077a9001e121a12fc234fee44514840380a0e Mon Sep 17 00:00:00 2001 From: René Kijewski Date: Tue, 24 May 2022 14:20:09 +0200 Subject: Move code generation into askama_derive --- askama/src/lib.rs | 9 +- askama_derive/Cargo.toml | 37 +- askama_derive/src/config.rs | 536 ++++++++ askama_derive/src/generator.rs | 2141 ++++++++++++++++++++++++++++++ askama_derive/src/heritage.rs | 126 ++ askama_derive/src/input.rs | 255 ++++ askama_derive/src/lib.rs | 92 +- askama_derive/src/parser.rs | 1885 +++++++++++++++++++++++++++ askama_derive/templates/a.html | 1 + askama_derive/templates/b.html | 1 + askama_derive/templates/sub/b.html | 1 + askama_derive/templates/sub/c.html | 1 + askama_derive/templates/sub/sub1/d.html | 1 + askama_shared/src/config.rs | 536 -------- askama_shared/src/filters/mod.rs | 36 - askama_shared/src/generator.rs | 2142 ------------------------------- askama_shared/src/heritage.rs | 126 -- askama_shared/src/input.rs | 256 ---- askama_shared/src/lib.rs | 51 - askama_shared/src/parser.rs | 1885 --------------------------- askama_shared/templates/a.html | 1 - askama_shared/templates/b.html | 1 - askama_shared/templates/sub/b.html | 1 - askama_shared/templates/sub/c.html | 1 - askama_shared/templates/sub/sub1/d.html | 1 - 25 files changed, 5062 insertions(+), 5061 deletions(-) create mode 100644 askama_derive/src/config.rs create mode 100644 askama_derive/src/generator.rs create mode 100644 askama_derive/src/heritage.rs create mode 100644 askama_derive/src/input.rs create mode 100644 askama_derive/src/parser.rs create mode 100644 askama_derive/templates/a.html create mode 100644 askama_derive/templates/b.html create mode 100644 askama_derive/templates/sub/b.html create mode 100644 askama_derive/templates/sub/c.html create mode 100644 askama_derive/templates/sub/sub1/d.html delete mode 100644 askama_shared/src/config.rs delete mode 100644 askama_shared/src/generator.rs delete mode 100644 askama_shared/src/heritage.rs delete mode 100644 askama_shared/src/input.rs delete mode 100644 askama_shared/src/parser.rs delete mode 100644 askama_shared/templates/a.html delete mode 100644 askama_shared/templates/b.html delete mode 100644 askama_shared/templates/sub/b.html delete mode 100644 askama_shared/templates/sub/c.html delete mode 100644 askama_shared/templates/sub/sub1/d.html diff --git a/askama/src/lib.rs b/askama/src/lib.rs index 02febf9..b0a83ff 100644 --- a/askama/src/lib.rs +++ b/askama/src/lib.rs @@ -63,19 +63,12 @@ #![deny(elided_lifetimes_in_paths)] #![deny(unreachable_pub)] -pub use askama_derive::*; +pub use askama_derive::Template; pub use askama_escape::{Html, Text}; pub use askama_shared::{ self as shared, filters, helpers, DynTemplate, Error, MarkupDisplay, Result, Template, }; -#[deprecated(since = "0.11.1", note = "The only function in this mod is deprecated")] -pub mod mime { - #[cfg(all(feature = "mime_guess", feature = "mime"))] - #[deprecated(since = "0.11.1", note = "Use Template::MIME_TYPE instead")] - pub use crate::shared::extension_to_mime_type; -} - /// Old build script helper to rebuild crates if contained templates have changed /// /// This function is now deprecated and does nothing. 
diff --git a/askama_derive/Cargo.toml b/askama_derive/Cargo.toml
index 016960d..ce7abfa 100644
--- a/askama_derive/Cargo.toml
+++ b/askama_derive/Cargo.toml
@@ -13,20 +13,27 @@ edition = "2018"
 proc-macro = true
 
 [features]
-config = ["askama_shared/config"]
-humansize = ["askama_shared/humansize"]
-markdown = ["askama_shared/markdown"]
-urlencode = ["askama_shared/percent-encoding"]
-serde-json = ["askama_shared/json"]
-serde-yaml = ["askama_shared/yaml"]
-num-traits = ["askama_shared/num-traits"]
-with-actix-web = ["askama_shared/actix-web"]
-with-axum = ["askama_shared/axum"]
-with-gotham = ["askama_shared/gotham"]
-with-mendes = ["askama_shared/mendes"]
-with-rocket = ["askama_shared/rocket"]
-with-tide = ["askama_shared/tide"]
-with-warp = ["askama_shared/warp"]
+config = ["serde", "toml"]
+humansize = []
+markdown = []
+urlencode = []
+serde-json = []
+serde-yaml = []
+num-traits = []
+with-actix-web = []
+with-axum = []
+with-gotham = []
+with-mendes = []
+with-rocket = []
+with-tide = []
+with-warp = []
 
 [dependencies]
-askama_shared = { version = "0.13.0", path = "../askama_shared", default-features = false }
+mime = "0.3"
+mime_guess = "2"
+nom = "7"
+proc-macro2 = "1"
+quote = "1"
+serde = { version = "1.0", optional = true, features = ["derive"] }
+syn = "1"
+toml = { version = "0.5", optional = true }
diff --git a/askama_derive/src/config.rs b/askama_derive/src/config.rs
new file mode 100644
index 0000000..01f81a2
--- /dev/null
+++ b/askama_derive/src/config.rs
@@ -0,0 +1,536 @@
+use std::collections::{BTreeMap, HashSet};
+use std::convert::TryFrom;
+use std::path::{Path, PathBuf};
+use std::{env, fs};
+
+#[cfg(feature = "serde")]
+use serde::Deserialize;
+
+use crate::CompileError;
+
+#[derive(Debug)]
+pub(crate) struct Config<'a> {
+    pub(crate) dirs: Vec<PathBuf>,
+    pub(crate) syntaxes: BTreeMap<String, Syntax<'a>>,
+    pub(crate) default_syntax: &'a str,
+    pub(crate) escapers: Vec<(HashSet<String>, String)>,
+    pub(crate) whitespace: WhitespaceHandling,
+}
+
+impl Config<'_> {
+    pub(crate) fn new(s: &str) -> std::result::Result<Config<'_>, CompileError> {
+        let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+        let default_dirs = vec![root.join("templates")];
+
+        let mut syntaxes = BTreeMap::new();
+        syntaxes.insert(DEFAULT_SYNTAX_NAME.to_string(), Syntax::default());
+
+        let raw = if s.is_empty() {
+            RawConfig::default()
+        } else {
+            RawConfig::from_toml_str(s)?
+        };
+
+        let (dirs, default_syntax, whitespace) = match raw.general {
+            Some(General {
+                dirs,
+                default_syntax,
+                whitespace,
+            }) => (
+                dirs.map_or(default_dirs, |v| {
+                    v.into_iter().map(|dir| root.join(dir)).collect()
+                }),
+                default_syntax.unwrap_or(DEFAULT_SYNTAX_NAME),
+                whitespace,
+            ),
+            None => (
+                default_dirs,
+                DEFAULT_SYNTAX_NAME,
+                WhitespaceHandling::default(),
+            ),
+        };
+
+        if let Some(raw_syntaxes) = raw.syntax {
+            for raw_s in raw_syntaxes {
+                let name = raw_s.name;
+
+                if syntaxes
+                    .insert(name.to_string(), Syntax::try_from(raw_s)?)
+ .is_some() + { + return Err(format!("syntax \"{}\" is already defined", name).into()); + } + } + } + + if !syntaxes.contains_key(default_syntax) { + return Err(format!("default syntax \"{}\" not found", default_syntax).into()); + } + + let mut escapers = Vec::new(); + if let Some(configured) = raw.escaper { + for escaper in configured { + escapers.push(( + escaper + .extensions + .iter() + .map(|ext| (*ext).to_string()) + .collect(), + escaper.path.to_string(), + )); + } + } + for (extensions, path) in DEFAULT_ESCAPERS { + escapers.push((str_set(extensions), (*path).to_string())); + } + + Ok(Config { + dirs, + syntaxes, + default_syntax, + escapers, + whitespace, + }) + } + + pub(crate) fn find_template( + &self, + path: &str, + start_at: Option<&Path>, + ) -> std::result::Result { + if let Some(root) = start_at { + let relative = root.with_file_name(path); + if relative.exists() { + return Ok(relative); + } + } + + for dir in &self.dirs { + let rooted = dir.join(path); + if rooted.exists() { + return Ok(rooted); + } + } + + Err(format!( + "template {:?} not found in directories {:?}", + path, self.dirs + ) + .into()) + } +} + +#[derive(Debug)] +pub(crate) struct Syntax<'a> { + pub(crate) block_start: &'a str, + pub(crate) block_end: &'a str, + pub(crate) expr_start: &'a str, + pub(crate) expr_end: &'a str, + pub(crate) comment_start: &'a str, + pub(crate) comment_end: &'a str, +} + +impl Default for Syntax<'_> { + fn default() -> Self { + Self { + block_start: "{%", + block_end: "%}", + expr_start: "{{", + expr_end: "}}", + comment_start: "{#", + comment_end: "#}", + } + } +} + +impl<'a> TryFrom> for Syntax<'a> { + type Error = CompileError; + + fn try_from(raw: RawSyntax<'a>) -> std::result::Result { + let default = Self::default(); + let syntax = Self { + block_start: raw.block_start.unwrap_or(default.block_start), + block_end: raw.block_end.unwrap_or(default.block_end), + expr_start: raw.expr_start.unwrap_or(default.expr_start), + expr_end: raw.expr_end.unwrap_or(default.expr_end), + comment_start: raw.comment_start.unwrap_or(default.comment_start), + comment_end: raw.comment_end.unwrap_or(default.comment_end), + }; + + if syntax.block_start.len() != 2 + || syntax.block_end.len() != 2 + || syntax.expr_start.len() != 2 + || syntax.expr_end.len() != 2 + || syntax.comment_start.len() != 2 + || syntax.comment_end.len() != 2 + { + return Err("length of delimiters must be two".into()); + } + + let bs = syntax.block_start.as_bytes()[0]; + let be = syntax.block_start.as_bytes()[1]; + let cs = syntax.comment_start.as_bytes()[0]; + let ce = syntax.comment_start.as_bytes()[1]; + let es = syntax.expr_start.as_bytes()[0]; + let ee = syntax.expr_start.as_bytes()[1]; + if !((bs == cs && bs == es) || (be == ce && be == ee)) { + return Err(format!("bad delimiters block_start: {}, comment_start: {}, expr_start: {}, needs one of the two characters in common", syntax.block_start, syntax.comment_start, syntax.expr_start).into()); + } + + Ok(syntax) + } +} + +#[cfg_attr(feature = "serde", derive(Deserialize))] +#[derive(Default)] +struct RawConfig<'d> { + #[cfg_attr(feature = "serde", serde(borrow))] + general: Option>, + syntax: Option>>, + escaper: Option>>, +} + +impl RawConfig<'_> { + #[cfg(feature = "config")] + fn from_toml_str(s: &str) -> std::result::Result, CompileError> { + toml::from_str(s).map_err(|e| format!("invalid TOML in {}: {}", CONFIG_FILE_NAME, e).into()) + } + + #[cfg(not(feature = "config"))] + fn from_toml_str(_: &str) -> std::result::Result, CompileError> { + Err("TOML support not 
available".into()) + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(Deserialize))] +#[cfg_attr(feature = "serde", serde(field_identifier, rename_all = "lowercase"))] +pub(crate) enum WhitespaceHandling { + /// The default behaviour. It will leave the whitespace characters "as is". + Preserve, + /// It'll remove all the whitespace characters before and after the jinja block. + Suppress, + /// It'll remove all the whitespace characters except one before and after the jinja blocks. + /// If there is a newline character, the preserved character in the trimmed characters, it will + /// the one preserved. + Minimize, +} + +impl Default for WhitespaceHandling { + fn default() -> Self { + WhitespaceHandling::Preserve + } +} + +#[cfg_attr(feature = "serde", derive(Deserialize))] +struct General<'a> { + #[cfg_attr(feature = "serde", serde(borrow))] + dirs: Option>, + default_syntax: Option<&'a str>, + #[cfg_attr(feature = "serde", serde(default))] + whitespace: WhitespaceHandling, +} + +#[cfg_attr(feature = "serde", derive(Deserialize))] +struct RawSyntax<'a> { + name: &'a str, + block_start: Option<&'a str>, + block_end: Option<&'a str>, + expr_start: Option<&'a str>, + expr_end: Option<&'a str>, + comment_start: Option<&'a str>, + comment_end: Option<&'a str>, +} + +#[cfg_attr(feature = "serde", derive(Deserialize))] +struct RawEscaper<'a> { + path: &'a str, + extensions: Vec<&'a str>, +} + +pub(crate) fn read_config_file( + config_path: &Option, +) -> std::result::Result { + let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); + let filename = match config_path { + Some(config_path) => root.join(config_path), + None => root.join(CONFIG_FILE_NAME), + }; + + if filename.exists() { + fs::read_to_string(&filename) + .map_err(|_| format!("unable to read {:?}", filename.to_str().unwrap()).into()) + } else if config_path.is_some() { + Err(format!("`{}` does not exist", root.display()).into()) + } else { + Ok("".to_string()) + } +} + +fn str_set(vals: &[T]) -> HashSet +where + T: ToString, +{ + vals.iter().map(|s| s.to_string()).collect() +} + +#[allow(clippy::match_wild_err_arm)] +pub(crate) fn get_template_source(tpl_path: &Path) -> std::result::Result { + match fs::read_to_string(tpl_path) { + Err(_) => Err(format!( + "unable to open template file '{}'", + tpl_path.to_str().unwrap() + ) + .into()), + Ok(mut source) => { + if source.ends_with('\n') { + let _ = source.pop(); + } + Ok(source) + } + } +} + +static CONFIG_FILE_NAME: &str = "askama.toml"; +static DEFAULT_SYNTAX_NAME: &str = "default"; +static DEFAULT_ESCAPERS: &[(&[&str], &str)] = &[ + (&["html", "htm", "xml"], "::askama::Html"), + (&["md", "none", "txt", "yml", ""], "::askama::Text"), + (&["j2", "jinja", "jinja2"], "::askama::Html"), +]; + +#[cfg(test)] +#[allow(clippy::blacklisted_name)] +mod tests { + use std::env; + use std::path::{Path, PathBuf}; + + use super::*; + + #[test] + fn get_source() { + let path = Config::new("") + .and_then(|config| config.find_template("b.html", None)) + .unwrap(); + assert_eq!(get_template_source(&path).unwrap(), "bar"); + } + + #[test] + fn test_default_config() { + let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); + root.push("templates"); + let config = Config::new("").unwrap(); + assert_eq!(config.dirs, vec![root]); + } + + #[cfg(feature = "config")] + #[test] + fn test_config_dirs() { + let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); + root.push("tpl"); + let config = Config::new("[general]\ndirs = 
[\"tpl\"]").unwrap(); + assert_eq!(config.dirs, vec![root]); + } + + fn assert_eq_rooted(actual: &Path, expected: &str) { + let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); + root.push("templates"); + let mut inner = PathBuf::new(); + inner.push(expected); + assert_eq!(actual.strip_prefix(root).unwrap(), inner); + } + + #[test] + fn find_absolute() { + let config = Config::new("").unwrap(); + let root = config.find_template("a.html", None).unwrap(); + let path = config.find_template("sub/b.html", Some(&root)).unwrap(); + assert_eq_rooted(&path, "sub/b.html"); + } + + #[test] + #[should_panic] + fn find_relative_nonexistent() { + let config = Config::new("").unwrap(); + let root = config.find_template("a.html", None).unwrap(); + config.find_template("c.html", Some(&root)).unwrap(); + } + + #[test] + fn find_relative() { + let config = Config::new("").unwrap(); + let root = config.find_template("sub/b.html", None).unwrap(); + let path = config.find_template("c.html", Some(&root)).unwrap(); + assert_eq_rooted(&path, "sub/c.html"); + } + + #[test] + fn find_relative_sub() { + let config = Config::new("").unwrap(); + let root = config.find_template("sub/b.html", None).unwrap(); + let path = config.find_template("sub1/d.html", Some(&root)).unwrap(); + assert_eq_rooted(&path, "sub/sub1/d.html"); + } + + #[cfg(feature = "config")] + #[test] + fn add_syntax() { + let raw_config = r#" + [general] + default_syntax = "foo" + + [[syntax]] + name = "foo" + block_start = "{<" + + [[syntax]] + name = "bar" + expr_start = "{!" + "#; + + let default_syntax = Syntax::default(); + let config = Config::new(raw_config).unwrap(); + assert_eq!(config.default_syntax, "foo"); + + let foo = config.syntaxes.get("foo").unwrap(); + assert_eq!(foo.block_start, "{<"); + assert_eq!(foo.block_end, default_syntax.block_end); + assert_eq!(foo.expr_start, default_syntax.expr_start); + assert_eq!(foo.expr_end, default_syntax.expr_end); + assert_eq!(foo.comment_start, default_syntax.comment_start); + assert_eq!(foo.comment_end, default_syntax.comment_end); + + let bar = config.syntaxes.get("bar").unwrap(); + assert_eq!(bar.block_start, default_syntax.block_start); + assert_eq!(bar.block_end, default_syntax.block_end); + assert_eq!(bar.expr_start, "{!"); + assert_eq!(bar.expr_end, default_syntax.expr_end); + assert_eq!(bar.comment_start, default_syntax.comment_start); + assert_eq!(bar.comment_end, default_syntax.comment_end); + } + + #[cfg(feature = "config")] + #[test] + fn add_syntax_two() { + let raw_config = r#" + syntax = [{ name = "foo", block_start = "{<" }, + { name = "bar", expr_start = "{!" 
} ] + + [general] + default_syntax = "foo" + "#; + + let default_syntax = Syntax::default(); + let config = Config::new(raw_config).unwrap(); + assert_eq!(config.default_syntax, "foo"); + + let foo = config.syntaxes.get("foo").unwrap(); + assert_eq!(foo.block_start, "{<"); + assert_eq!(foo.block_end, default_syntax.block_end); + assert_eq!(foo.expr_start, default_syntax.expr_start); + assert_eq!(foo.expr_end, default_syntax.expr_end); + assert_eq!(foo.comment_start, default_syntax.comment_start); + assert_eq!(foo.comment_end, default_syntax.comment_end); + + let bar = config.syntaxes.get("bar").unwrap(); + assert_eq!(bar.block_start, default_syntax.block_start); + assert_eq!(bar.block_end, default_syntax.block_end); + assert_eq!(bar.expr_start, "{!"); + assert_eq!(bar.expr_end, default_syntax.expr_end); + assert_eq!(bar.comment_start, default_syntax.comment_start); + assert_eq!(bar.comment_end, default_syntax.comment_end); + } + + #[cfg(feature = "toml")] + #[should_panic] + #[test] + fn use_default_at_syntax_name() { + let raw_config = r#" + syntax = [{ name = "default" }] + "#; + + let _config = Config::new(raw_config).unwrap(); + } + + #[cfg(feature = "toml")] + #[should_panic] + #[test] + fn duplicated_syntax_name_on_list() { + let raw_config = r#" + syntax = [{ name = "foo", block_start = "~<" }, + { name = "foo", block_start = "%%" } ] + "#; + + let _config = Config::new(raw_config).unwrap(); + } + + #[cfg(feature = "toml")] + #[should_panic] + #[test] + fn is_not_exist_default_syntax() { + let raw_config = r#" + [general] + default_syntax = "foo" + "#; + + let _config = Config::new(raw_config).unwrap(); + } + + #[cfg(feature = "config")] + #[test] + fn escape_modes() { + let config = Config::new( + r#" + [[escaper]] + path = "::askama::Js" + extensions = ["js"] + "#, + ) + .unwrap(); + assert_eq!( + config.escapers, + vec![ + (str_set(&["js"]), "::askama::Js".into()), + (str_set(&["html", "htm", "xml"]), "::askama::Html".into()), + ( + str_set(&["md", "none", "txt", "yml", ""]), + "::askama::Text".into() + ), + (str_set(&["j2", "jinja", "jinja2"]), "::askama::Html".into()), + ] + ); + } + + #[test] + fn test_whitespace_parsing() { + let config = Config::new( + r#" + [general] + whitespace = "suppress" + "#, + ) + .unwrap(); + assert_eq!(config.whitespace, WhitespaceHandling::Suppress); + + let config = Config::new(r#""#).unwrap(); + assert_eq!(config.whitespace, WhitespaceHandling::Preserve); + + let config = Config::new( + r#" + [general] + whitespace = "preserve" + "#, + ) + .unwrap(); + assert_eq!(config.whitespace, WhitespaceHandling::Preserve); + + let config = Config::new( + r#" + [general] + whitespace = "minimize" + "#, + ) + .unwrap(); + assert_eq!(config.whitespace, WhitespaceHandling::Minimize); + } +} diff --git a/askama_derive/src/generator.rs b/askama_derive/src/generator.rs new file mode 100644 index 0000000..39a5380 --- /dev/null +++ b/askama_derive/src/generator.rs @@ -0,0 +1,2141 @@ +use crate::config::{get_template_source, read_config_file, Config, WhitespaceHandling}; +use crate::heritage::{Context, Heritage}; +use crate::input::{Print, Source, TemplateInput}; +use crate::parser::{parse, Cond, CondTest, Expr, Loop, Node, Target, When, Whitespace, Ws}; +use crate::CompileError; + +use proc_macro::TokenStream; +use quote::{quote, ToTokens}; + +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::{cmp, hash, mem, str}; + +/// The actual implementation for askama_derive::Template +pub(crate) fn derive_template(input: TokenStream) -> TokenStream { 
+ let ast: syn::DeriveInput = syn::parse(input).unwrap(); + match build_template(&ast) { + Ok(source) => source.parse().unwrap(), + Err(e) => e.into_compile_error(), + } +} + +/// Takes a `syn::DeriveInput` and generates source code for it +/// +/// Reads the metadata from the `template()` attribute to get the template +/// metadata, then fetches the source from the filesystem. The source is +/// parsed, and the parse tree is fed to the code generator. Will print +/// the parse tree and/or generated source according to the `print` key's +/// value as passed to the `template()` attribute. +fn build_template(ast: &syn::DeriveInput) -> Result { + let template_args = TemplateArgs::new(ast)?; + let config_toml = read_config_file(&template_args.config_path)?; + let config = Config::new(&config_toml)?; + let input = TemplateInput::new(ast, &config, template_args)?; + let source: String = match input.source { + Source::Source(ref s) => s.clone(), + Source::Path(_) => get_template_source(&input.path)?, + }; + + let mut sources = HashMap::new(); + find_used_templates(&input, &mut sources, source)?; + + let mut parsed = HashMap::new(); + for (path, src) in &sources { + parsed.insert(path.as_path(), parse(src, input.syntax)?); + } + + let mut contexts = HashMap::new(); + for (path, nodes) in &parsed { + contexts.insert(*path, Context::new(input.config, path, nodes)?); + } + + let ctx = &contexts[input.path.as_path()]; + let heritage = if !ctx.blocks.is_empty() || ctx.extends.is_some() { + Some(Heritage::new(ctx, &contexts)) + } else { + None + }; + + if input.print == Print::Ast || input.print == Print::All { + eprintln!("{:?}", parsed[input.path.as_path()]); + } + + let code = Generator::new( + &input, + &contexts, + heritage.as_ref(), + MapChain::new(), + config.whitespace, + ) + .build(&contexts[input.path.as_path()])?; + if input.print == Print::Code || input.print == Print::All { + eprintln!("{}", code); + } + Ok(code) +} + +#[derive(Default)] +pub(crate) struct TemplateArgs { + pub(crate) source: Option, + pub(crate) print: Print, + pub(crate) escaping: Option, + pub(crate) ext: Option, + pub(crate) syntax: Option, + pub(crate) config_path: Option, +} + +impl TemplateArgs { + fn new(ast: &'_ syn::DeriveInput) -> Result { + // Check that an attribute called `template()` exists once and that it is + // the proper type (list). + let mut template_args = None; + for attr in &ast.attrs { + let ident = match attr.path.get_ident() { + Some(ident) => ident, + None => continue, + }; + + if ident == "template" { + if template_args.is_some() { + return Err("duplicated 'template' attribute".into()); + } + + match attr.parse_meta() { + Ok(syn::Meta::List(syn::MetaList { nested, .. })) => { + template_args = Some(nested); + } + Ok(_) => return Err("'template' attribute must be a list".into()), + Err(e) => return Err(format!("unable to parse attribute: {}", e).into()), + } + } + } + let template_args = + template_args.ok_or_else(|| CompileError::from("no attribute 'template' found"))?; + + let mut args = Self::default(); + // Loop over the meta attributes and find everything that we + // understand. Return a CompileError if something is not right. + // `source` contains an enum that can represent `path` or `source`. 
+ for item in template_args { + let pair = match item { + syn::NestedMeta::Meta(syn::Meta::NameValue(ref pair)) => pair, + _ => { + return Err(format!( + "unsupported attribute argument {:?}", + item.to_token_stream() + ) + .into()) + } + }; + let ident = match pair.path.get_ident() { + Some(ident) => ident, + None => unreachable!("not possible in syn::Meta::NameValue(…)"), + }; + + if ident == "path" { + if let syn::Lit::Str(ref s) = pair.lit { + if args.source.is_some() { + return Err("must specify 'source' or 'path', not both".into()); + } + args.source = Some(Source::Path(s.value())); + } else { + return Err("template path must be string literal".into()); + } + } else if ident == "source" { + if let syn::Lit::Str(ref s) = pair.lit { + if args.source.is_some() { + return Err("must specify 'source' or 'path', not both".into()); + } + args.source = Some(Source::Source(s.value())); + } else { + return Err("template source must be string literal".into()); + } + } else if ident == "print" { + if let syn::Lit::Str(ref s) = pair.lit { + args.print = s.value().parse()?; + } else { + return Err("print value must be string literal".into()); + } + } else if ident == "escape" { + if let syn::Lit::Str(ref s) = pair.lit { + args.escaping = Some(s.value()); + } else { + return Err("escape value must be string literal".into()); + } + } else if ident == "ext" { + if let syn::Lit::Str(ref s) = pair.lit { + args.ext = Some(s.value()); + } else { + return Err("ext value must be string literal".into()); + } + } else if ident == "syntax" { + if let syn::Lit::Str(ref s) = pair.lit { + args.syntax = Some(s.value()) + } else { + return Err("syntax value must be string literal".into()); + } + } else if ident == "config" { + if let syn::Lit::Str(ref s) = pair.lit { + args.config_path = Some(s.value()) + } else { + return Err("config value must be string literal".into()); + } + } else { + return Err(format!("unsupported attribute key {:?} found", ident).into()); + } + } + + Ok(args) + } +} + +fn find_used_templates( + input: &TemplateInput<'_>, + map: &mut HashMap, + source: String, +) -> Result<(), CompileError> { + let mut dependency_graph = Vec::new(); + let mut check = vec![(input.path.clone(), source)]; + while let Some((path, source)) = check.pop() { + for n in parse(&source, input.syntax)? 
{
+            match n {
+                Node::Extends(Expr::StrLit(extends)) => {
+                    let extends = input.config.find_template(extends, Some(&path))?;
+                    let dependency_path = (path.clone(), extends.clone());
+                    if dependency_graph.contains(&dependency_path) {
+                        return Err(format!(
+                            "cyclic dependency in graph {:#?}",
+                            dependency_graph
+                                .iter()
+                                .map(|e| format!("{:#?} --> {:#?}", e.0, e.1))
+                                .collect::<Vec<String>>()
+                        )
+                        .into());
+                    }
+                    dependency_graph.push(dependency_path);
+                    let source = get_template_source(&extends)?;
+                    check.push((extends, source));
+                }
+                Node::Import(_, import, _) => {
+                    let import = input.config.find_template(import, Some(&path))?;
+                    let source = get_template_source(&import)?;
+                    check.push((import, source));
+                }
+                _ => {}
+            }
+        }
+        map.insert(path, source);
+    }
+    Ok(())
+}
+struct Generator<'a, S: std::hash::BuildHasher> {
+    // The template input state: original struct AST and attributes
+    input: &'a TemplateInput<'a>,
+    // All contexts, keyed by the package-relative template path
+    contexts: &'a HashMap<&'a Path, Context<'a>, S>,
+    // The heritage contains references to blocks and their ancestry
+    heritage: Option<&'a Heritage<'a>>,
+    // Variables accessible directly from the current scope (not redirected to context)
+    locals: MapChain<'a, &'a str, LocalMeta>,
+    // Suffix whitespace from the previous literal. Will be flushed to the
+    // output buffer unless suppressed by whitespace suppression on the next
+    // non-literal.
+    next_ws: Option<&'a str>,
+    // Whitespace suppression from the previous non-literal. Will be used to
+    // determine whether to flush prefix whitespace from the next literal.
+    skip_ws: WhitespaceHandling,
+    // If currently in a block, this will contain the name of a potential parent block
+    super_block: Option<(&'a str, usize)>,
+    // buffer for writable
+    buf_writable: Vec<Writable<'a>>,
+    // Counter for write! hash named arguments
+    named: usize,
+    // If set to `suppress`, the whitespace characters will be removed by default unless `+` is
+    // used.
+    whitespace: WhitespaceHandling,
+}
+
+impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
+    fn new<'n>(
+        input: &'n TemplateInput<'_>,
+        contexts: &'n HashMap<&'n Path, Context<'n>, S>,
+        heritage: Option<&'n Heritage<'_>>,
+        locals: MapChain<'n, &'n str, LocalMeta>,
+        whitespace: WhitespaceHandling,
+    ) -> Generator<'n, S> {
+        Generator {
+            input,
+            contexts,
+            heritage,
+            locals,
+            next_ws: None,
+            skip_ws: WhitespaceHandling::Preserve,
+            super_block: None,
+            buf_writable: vec![],
+            named: 0,
+            whitespace,
+        }
+    }
+
+    fn child(&mut self) -> Generator<'_, S> {
+        let locals = MapChain::with_parent(&self.locals);
+        Self::new(
+            self.input,
+            self.contexts,
+            self.heritage,
+            locals,
+            self.whitespace,
+        )
+    }
+
+    // Takes a Context and generates the relevant implementations.
+    fn build(mut self, ctx: &'a Context<'_>) -> Result<String, CompileError> {
+        let mut buf = Buffer::new(0);
+        if !ctx.blocks.is_empty() {
+            if let Some(parent) = self.input.parent {
+                self.deref_to_parent(&mut buf, parent)?;
+            }
+        };
+
+        self.impl_template(ctx, &mut buf)?;
+        self.impl_display(&mut buf)?;
+
+        #[cfg(feature = "with-actix-web")]
+        self.impl_actix_web_responder(&mut buf)?;
+        #[cfg(feature = "with-axum")]
+        self.impl_axum_into_response(&mut buf)?;
+        #[cfg(feature = "with-gotham")]
+        self.impl_gotham_into_response(&mut buf)?;
+        #[cfg(feature = "with-mendes")]
+        self.impl_mendes_responder(&mut buf)?;
+        #[cfg(feature = "with-rocket")]
+        self.impl_rocket_responder(&mut buf)?;
+        #[cfg(feature = "with-tide")]
+        self.impl_tide_integrations(&mut buf)?;
+        #[cfg(feature = "with-warp")]
+        self.impl_warp_reply(&mut buf)?;
+
+        Ok(buf.buf)
+    }
+
+    // Implement `Template` for the given context struct.
+    fn impl_template(
+        &mut self,
+        ctx: &'a Context<'_>,
+        buf: &mut Buffer,
+    ) -> Result<(), CompileError> {
+        self.write_header(buf, "::askama::Template", None)?;
+        buf.writeln(
+            "fn render_into(&self, writer: &mut (impl ::std::fmt::Write + ?Sized)) -> \
+             ::askama::Result<()> {",
+        )?;
+
+        // Make sure the compiler understands that the generated code depends on the template files.
+        for path in self.contexts.keys() {
+            // Skip the fake path of templates defined in rust source.
+            let path_is_valid = match self.input.source {
+                Source::Path(_) => true,
+                Source::Source(_) => path != &self.input.path,
+            };
+            if path_is_valid {
+                let path = path.to_str().unwrap();
+                buf.writeln(
+                    &quote! {
+                        include_bytes!(#path);
+                    }
+                    .to_string(),
+                )?;
+            }
+        }
+
+        let size_hint = if let Some(heritage) = self.heritage {
+            self.handle(heritage.root, heritage.root.nodes, buf, AstLevel::Top)
+        } else {
+            self.handle(ctx, ctx.nodes, buf, AstLevel::Top)
+        }?;
+
+        self.flush_ws(Ws(None, None));
+        buf.writeln("::askama::Result::Ok(())")?;
+        buf.writeln("}")?;
+
+        buf.writeln("const EXTENSION: ::std::option::Option<&'static ::std::primitive::str> = ")?;
+        buf.writeln(&format!("{:?}", self.input.extension()))?;
+        buf.writeln(";")?;
+
+        buf.writeln("const SIZE_HINT: ::std::primitive::usize = ")?;
+        buf.writeln(&format!("{}", size_hint))?;
+        buf.writeln(";")?;
+
+        buf.writeln("const MIME_TYPE: &'static ::std::primitive::str = ")?;
+        buf.writeln(&format!("{:?}", &self.input.mime_type))?;
+        buf.writeln(";")?;
+
+        buf.writeln("}")?;
+        Ok(())
+    }
+
+    // Implement `Deref` for an inheriting context struct.
+    fn deref_to_parent(
+        &mut self,
+        buf: &mut Buffer,
+        parent_type: &syn::Type,
+    ) -> Result<(), CompileError> {
+        self.write_header(buf, "::std::ops::Deref", None)?;
+        buf.writeln(&format!(
+            "type Target = {};",
+            parent_type.into_token_stream()
+        ))?;
+        buf.writeln("#[inline]")?;
+        buf.writeln("fn deref(&self) -> &Self::Target {")?;
+        buf.writeln("&self._parent")?;
+        buf.writeln("}")?;
+        buf.writeln("}")
+    }
+
+    // Implement `Display` for the given context struct.
+    fn impl_display(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+        self.write_header(buf, "::std::fmt::Display", None)?;
+        buf.writeln("#[inline]")?;
+        buf.writeln("fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {")?;
+        buf.writeln("::askama::Template::render_into(self, f).map_err(|_| ::std::fmt::Error {})")?;
+        buf.writeln("}")?;
+        buf.writeln("}")
+    }
+
+    // Implement Actix-web's `Responder`.
+ #[cfg(feature = "with-actix-web")] + fn impl_actix_web_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { + self.write_header(buf, "::askama_actix::actix_web::Responder", None)?; + buf.writeln("type Body = ::askama_actix::actix_web::body::BoxBody;")?; + buf.writeln("#[inline]")?; + buf.writeln( + "fn respond_to(self, _req: &::askama_actix::actix_web::HttpRequest) \ + -> ::askama_actix::actix_web::HttpResponse {", + )?; + buf.writeln("::to_response(&self)")?; + buf.writeln("}")?; + buf.writeln("}") + } + + // Implement Axum's `IntoResponse`. + #[cfg(feature = "with-axum")] + fn impl_axum_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { + self.write_header(buf, "::askama_axum::IntoResponse", None)?; + buf.writeln("#[inline]")?; + buf.writeln( + "fn into_response(self)\ + -> ::askama_axum::Response {", + )?; + let ext = self.input.extension().unwrap_or("txt"); + buf.writeln(&format!("::askama_axum::into_response(&self, {:?})", ext))?; + buf.writeln("}")?; + buf.writeln("}") + } + + // Implement gotham's `IntoResponse`. + #[cfg(feature = "with-gotham")] + fn impl_gotham_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { + self.write_header(buf, "::askama_gotham::IntoResponse", None)?; + buf.writeln("#[inline]")?; + buf.writeln( + "fn into_response(self, _state: &::askama_gotham::State)\ + -> ::askama_gotham::Response<::askama_gotham::Body> {", + )?; + let ext = self.input.extension().unwrap_or("txt"); + buf.writeln(&format!("::askama_gotham::respond(&self, {:?})", ext))?; + buf.writeln("}")?; + buf.writeln("}") + } + + // Implement mendes' `Responder`. + #[cfg(feature = "with-mendes")] + fn impl_mendes_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { + let param = syn::parse_str("A: ::mendes::Application").unwrap(); + + let mut generics = self.input.ast.generics.clone(); + generics.params.push(param); + let (_, orig_ty_generics, _) = self.input.ast.generics.split_for_impl(); + let (impl_generics, _, where_clause) = generics.split_for_impl(); + + let mut where_clause = match where_clause { + Some(clause) => clause.clone(), + None => syn::WhereClause { + where_token: syn::Token![where](proc_macro2::Span::call_site()), + predicates: syn::punctuated::Punctuated::new(), + }, + }; + + where_clause + .predicates + .push(syn::parse_str("A::ResponseBody: From").unwrap()); + where_clause + .predicates + .push(syn::parse_str("A::Error: From<::askama_mendes::Error>").unwrap()); + + buf.writeln( + format!( + "{} {} for {} {} {{", + quote!(impl#impl_generics), + "::mendes::application::IntoResponse", + self.input.ast.ident, + quote!(#orig_ty_generics #where_clause), + ) + .as_ref(), + )?; + + buf.writeln( + "fn into_response(self, app: &A, req: &::mendes::http::request::Parts) \ + -> ::mendes::http::Response {", + )?; + + buf.writeln(&format!( + "::askama_mendes::into_response(app, req, &self, {:?})", + self.input.extension() + ))?; + buf.writeln("}")?; + buf.writeln("}")?; + Ok(()) + } + + // Implement Rocket's `Responder`. 
+ #[cfg(feature = "with-rocket")] + fn impl_rocket_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { + let lifetime = syn::Lifetime::new("'askama", proc_macro2::Span::call_site()); + let param = syn::GenericParam::Lifetime(syn::LifetimeDef::new(lifetime)); + self.write_header( + buf, + "::askama_rocket::Responder<'askama>", + Some(vec![param]), + )?; + + buf.writeln("#[inline]")?; + buf.writeln( + "fn respond_to(self, _: &::askama_rocket::Request) \ + -> ::askama_rocket::Result<'askama> {", + )?; + let ext = self.input.extension().unwrap_or("txt"); + buf.writeln(&format!("::askama_rocket::respond(&self, {:?})", ext))?; + + buf.writeln("}")?; + buf.writeln("}")?; + Ok(()) + } + + #[cfg(feature = "with-tide")] + fn impl_tide_integrations(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { + let ext = self.input.extension().unwrap_or("txt"); + + self.write_header( + buf, + "::std::convert::TryInto<::askama_tide::tide::Body>", + None, + )?; + buf.writeln( + "type Error = ::askama_tide::askama::Error;\n\ + #[inline]\n\ + fn try_into(self) -> ::askama_tide::askama::Result<::askama_tide::tide::Body> {", + )?; + buf.writeln(&format!("::askama_tide::try_into_body(&self, {:?})", &ext))?; + buf.writeln("}")?; + buf.writeln("}")?; + + buf.writeln("#[allow(clippy::from_over_into)]")?; + self.write_header(buf, "Into<::askama_tide::tide::Response>", None)?; + buf.writeln("#[inline]")?; + buf.writeln("fn into(self) -> ::askama_tide::tide::Response {")?; + buf.writeln(&format!("::askama_tide::into_response(&self, {:?})", ext))?; + buf.writeln("}\n}") + } + + #[cfg(feature = "with-warp")] + fn impl_warp_reply(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { + self.write_header(buf, "::askama_warp::warp::reply::Reply", None)?; + buf.writeln("#[inline]")?; + buf.writeln("fn into_response(self) -> ::askama_warp::warp::reply::Response {")?; + let ext = self.input.extension().unwrap_or("txt"); + buf.writeln(&format!("::askama_warp::reply(&self, {:?})", ext))?; + buf.writeln("}")?; + buf.writeln("}") + } + + // Writes header for the `impl` for `TraitFromPathName` or `Template` + // for the given context struct. 
+ fn write_header( + &mut self, + buf: &mut Buffer, + target: &str, + params: Option>, + ) -> Result<(), CompileError> { + let mut generics = self.input.ast.generics.clone(); + if let Some(params) = params { + for param in params { + generics.params.push(param); + } + } + let (_, orig_ty_generics, _) = self.input.ast.generics.split_for_impl(); + let (impl_generics, _, where_clause) = generics.split_for_impl(); + buf.writeln( + format!( + "{} {} for {}{} {{", + quote!(impl#impl_generics), + target, + self.input.ast.ident, + quote!(#orig_ty_generics #where_clause), + ) + .as_ref(), + ) + } + + /* Helper methods for handling node types */ + + fn handle( + &mut self, + ctx: &'a Context<'_>, + nodes: &'a [Node<'_>], + buf: &mut Buffer, + level: AstLevel, + ) -> Result { + let mut size_hint = 0; + for n in nodes { + match *n { + Node::Lit(lws, val, rws) => { + self.visit_lit(lws, val, rws); + } + Node::Comment(ws) => { + self.write_comment(ws); + } + Node::Expr(ws, ref val) => { + self.write_expr(ws, val); + } + Node::LetDecl(ws, ref var) => { + self.write_let_decl(buf, ws, var)?; + } + Node::Let(ws, ref var, ref val) => { + self.write_let(buf, ws, var, val)?; + } + Node::Cond(ref conds, ws) => { + self.write_cond(ctx, buf, conds, ws)?; + } + Node::Match(ws1, ref expr, ref arms, ws2) => { + self.write_match(ctx, buf, ws1, expr, arms, ws2)?; + } + Node::Loop(ref loop_block) => { + self.write_loop(ctx, buf, loop_block)?; + } + Node::BlockDef(ws1, name, _, ws2) => { + self.write_block(buf, Some(name), Ws(ws1.0, ws2.1))?; + } + Node::Include(ws, path) => { + size_hint += self.handle_include(ctx, buf, ws, path)?; + } + Node::Call(ws, scope, name, ref args) => { + size_hint += self.write_call(ctx, buf, ws, scope, name, args)?; + } + Node::Macro(_, ref m) => { + if level != AstLevel::Top { + return Err("macro blocks only allowed at the top level".into()); + } + self.flush_ws(m.ws1); + self.prepare_ws(m.ws2); + } + Node::Raw(ws1, lws, val, rws, ws2) => { + self.handle_ws(ws1); + self.visit_lit(lws, val, rws); + self.handle_ws(ws2); + } + Node::Import(ws, _, _) => { + if level != AstLevel::Top { + return Err("import blocks only allowed at the top level".into()); + } + self.handle_ws(ws); + } + Node::Extends(_) => { + if level != AstLevel::Top { + return Err("extend blocks only allowed at the top level".into()); + } + // No whitespace handling: child template top-level is not used, + // except for the blocks defined in it. 
+ } + Node::Break(ws) => { + self.handle_ws(ws); + self.write_buf_writable(buf)?; + buf.writeln("break;")?; + } + Node::Continue(ws) => { + self.handle_ws(ws); + self.write_buf_writable(buf)?; + buf.writeln("continue;")?; + } + } + } + + if AstLevel::Top == level { + size_hint += self.write_buf_writable(buf)?; + } + Ok(size_hint) + } + + fn write_cond( + &mut self, + ctx: &'a Context<'_>, + buf: &mut Buffer, + conds: &'a [Cond<'_>], + ws: Ws, + ) -> Result { + let mut flushed = 0; + let mut arm_sizes = Vec::new(); + let mut has_else = false; + for (i, &(cws, ref cond, ref nodes)) in conds.iter().enumerate() { + self.handle_ws(cws); + flushed += self.write_buf_writable(buf)?; + if i > 0 { + self.locals.pop(); + } + + self.locals.push(); + let mut arm_size = 0; + if let Some(CondTest { target, expr }) = cond { + if i == 0 { + buf.write("if "); + } else { + buf.dedent()?; + buf.write("} else if "); + } + + if let Some(target) = target { + let mut expr_buf = Buffer::new(0); + self.visit_expr(&mut expr_buf, expr)?; + buf.write("let "); + self.visit_target(buf, true, true, target); + buf.write(" = &("); + buf.write(&expr_buf.buf); + buf.write(")"); + } else { + // The following syntax `*(&(...) as &bool)` is used to + // trigger Rust's automatic dereferencing, to coerce + // e.g. `&&&&&bool` to `bool`. First `&(...) as &bool` + // coerces e.g. `&&&bool` to `&bool`. Then `*(&bool)` + // finally dereferences it to `bool`. + buf.write("*(&("); + let expr_code = self.visit_expr_root(expr)?; + buf.write(&expr_code); + buf.write(") as &bool)"); + } + } else { + buf.dedent()?; + buf.write("} else"); + has_else = true; + } + + buf.writeln(" {")?; + + arm_size += self.handle(ctx, nodes, buf, AstLevel::Nested)?; + arm_sizes.push(arm_size); + } + self.handle_ws(ws); + flushed += self.write_buf_writable(buf)?; + buf.writeln("}")?; + + self.locals.pop(); + + if !has_else { + arm_sizes.push(0); + } + Ok(flushed + median(&mut arm_sizes)) + } + + #[allow(clippy::too_many_arguments)] + fn write_match( + &mut self, + ctx: &'a Context<'_>, + buf: &mut Buffer, + ws1: Ws, + expr: &Expr<'_>, + arms: &'a [When<'_>], + ws2: Ws, + ) -> Result { + self.flush_ws(ws1); + let flushed = self.write_buf_writable(buf)?; + let mut arm_sizes = Vec::new(); + + let expr_code = self.visit_expr_root(expr)?; + buf.writeln(&format!("match &{} {{", expr_code))?; + + let mut arm_size = 0; + for (i, arm) in arms.iter().enumerate() { + let &(ws, ref target, ref body) = arm; + self.handle_ws(ws); + + if i > 0 { + arm_sizes.push(arm_size + self.write_buf_writable(buf)?); + + buf.writeln("}")?; + self.locals.pop(); + } + + self.locals.push(); + self.visit_target(buf, true, true, target); + buf.writeln(" => {")?; + + arm_size = self.handle(ctx, body, buf, AstLevel::Nested)?; + } + + self.handle_ws(ws2); + arm_sizes.push(arm_size + self.write_buf_writable(buf)?); + buf.writeln("}")?; + self.locals.pop(); + + buf.writeln("}")?; + + Ok(flushed + median(&mut arm_sizes)) + } + + #[allow(clippy::too_many_arguments)] + fn write_loop( + &mut self, + ctx: &'a Context<'_>, + buf: &mut Buffer, + loop_block: &'a Loop<'_>, + ) -> Result { + self.handle_ws(loop_block.ws1); + self.locals.push(); + + let expr_code = self.visit_expr_root(&loop_block.iter)?; + + let flushed = self.write_buf_writable(buf)?; + buf.writeln("{")?; + buf.writeln("let mut _did_loop = false;")?; + match loop_block.iter { + Expr::Range(_, _, _) => buf.writeln(&format!("let _iter = {};", expr_code)), + Expr::Array(..) 
=> buf.writeln(&format!("let _iter = {}.iter();", expr_code)), + // If `iter` is a call then we assume it's something that returns + // an iterator. If not then the user can explicitly add the needed + // call without issues. + Expr::Call(..) | Expr::Index(..) => { + buf.writeln(&format!("let _iter = ({}).into_iter();", expr_code)) + } + // If accessing `self` then it most likely needs to be + // borrowed, to prevent an attempt of moving. + _ if expr_code.starts_with("self.") => { + buf.writeln(&format!("let _iter = (&{}).into_iter();", expr_code)) + } + // If accessing a field then it most likely needs to be + // borrowed, to prevent an attempt of moving. + Expr::Attr(..) => buf.writeln(&format!("let _iter = (&{}).into_iter();", expr_code)), + // Otherwise, we borrow `iter` assuming that it implements `IntoIterator`. + _ => buf.writeln(&format!("let _iter = ({}).into_iter();", expr_code)), + }?; + if let Some(cond) = &loop_block.cond { + self.locals.push(); + buf.write("let _iter = _iter.filter(|"); + self.visit_target(buf, true, true, &loop_block.var); + buf.write("| -> bool {"); + self.visit_expr(buf, cond)?; + buf.writeln("});")?; + self.locals.pop(); + } + + self.locals.push(); + buf.write("for ("); + self.visit_target(buf, true, true, &loop_block.var); + buf.writeln(", _loop_item) in ::askama::helpers::TemplateLoop::new(_iter) {")?; + + buf.writeln("_did_loop = true;")?; + let mut size_hint1 = self.handle(ctx, &loop_block.body, buf, AstLevel::Nested)?; + self.handle_ws(loop_block.ws2); + size_hint1 += self.write_buf_writable(buf)?; + self.locals.pop(); + buf.writeln("}")?; + + buf.writeln("if !_did_loop {")?; + self.locals.push(); + let mut size_hint2 = self.handle(ctx, &loop_block.else_block, buf, AstLevel::Nested)?; + self.handle_ws(loop_block.ws3); + size_hint2 += self.write_buf_writable(buf)?; + self.locals.pop(); + buf.writeln("}")?; + + buf.writeln("}")?; + + Ok(flushed + ((size_hint1 * 3) + size_hint2) / 2) + } + + fn write_call( + &mut self, + ctx: &'a Context<'_>, + buf: &mut Buffer, + ws: Ws, + scope: Option<&str>, + name: &str, + args: &[Expr<'_>], + ) -> Result { + if name == "super" { + return self.write_block(buf, None, ws); + } + + let (def, own_ctx) = match scope { + Some(s) => { + let path = ctx.imports.get(s).ok_or_else(|| { + CompileError::from(format!("no import found for scope {:?}", s)) + })?; + let mctx = self.contexts.get(path.as_path()).ok_or_else(|| { + CompileError::from(format!("context for {:?} not found", path)) + })?; + let def = mctx.macros.get(name).ok_or_else(|| { + CompileError::from(format!("macro {:?} not found in scope {:?}", name, s)) + })?; + (def, mctx) + } + None => { + let def = ctx + .macros + .get(name) + .ok_or_else(|| CompileError::from(format!("macro {:?} not found", name)))?; + (def, ctx) + } + }; + + self.flush_ws(ws); // Cannot handle_ws() here: whitespace from macro definition comes first + self.locals.push(); + self.write_buf_writable(buf)?; + buf.writeln("{")?; + self.prepare_ws(def.ws1); + + let mut names = Buffer::new(0); + let mut values = Buffer::new(0); + let mut is_first_variable = true; + for (i, arg) in def.args.iter().enumerate() { + let expr = args.get(i).ok_or_else(|| { + CompileError::from(format!("macro {:?} takes more than {} arguments", name, i)) + })?; + + match expr { + // If `expr` is already a form of variable then + // don't reintroduce a new variable. This is + // to avoid moving non-copyable values. 
+ Expr::Var(name) => { + let var = self.locals.resolve_or_self(name); + self.locals.insert(arg, LocalMeta::with_ref(var)); + } + Expr::Attr(obj, attr) => { + let mut attr_buf = Buffer::new(0); + self.visit_attr(&mut attr_buf, obj, attr)?; + + let var = self.locals.resolve(&attr_buf.buf).unwrap_or(attr_buf.buf); + self.locals.insert(arg, LocalMeta::with_ref(var)); + } + // Everything else still needs to become variables, + // to avoid having the same logic be executed + // multiple times, e.g. in the case of macro + // parameters being used multiple times. + _ => { + if is_first_variable { + is_first_variable = false + } else { + names.write(", "); + values.write(", "); + } + names.write(arg); + + values.write("("); + values.write(&self.visit_expr_root(expr)?); + values.write(")"); + self.locals.insert_with_default(arg); + } + } + } + + debug_assert_eq!(names.buf.is_empty(), values.buf.is_empty()); + if !names.buf.is_empty() { + buf.writeln(&format!("let ({}) = ({});", names.buf, values.buf))?; + } + + let mut size_hint = self.handle(own_ctx, &def.nodes, buf, AstLevel::Nested)?; + + self.flush_ws(def.ws2); + size_hint += self.write_buf_writable(buf)?; + buf.writeln("}")?; + self.locals.pop(); + self.prepare_ws(ws); + Ok(size_hint) + } + + fn handle_include( + &mut self, + ctx: &'a Context<'_>, + buf: &mut Buffer, + ws: Ws, + path: &str, + ) -> Result { + self.flush_ws(ws); + self.write_buf_writable(buf)?; + let path = self + .input + .config + .find_template(path, Some(&self.input.path))?; + let src = get_template_source(&path)?; + let nodes = parse(&src, self.input.syntax)?; + + // Make sure the compiler understands that the generated code depends on the template file. + { + let path = path.to_str().unwrap(); + buf.writeln( + "e! { + include_bytes!(#path); + } + .to_string(), + )?; + } + + let size_hint = { + // Since nodes must not outlive the Generator, we instantiate + // a nested Generator here to handle the include's nodes. 
+ let mut gen = self.child(); + let mut size_hint = gen.handle(ctx, &nodes, buf, AstLevel::Nested)?; + size_hint += gen.write_buf_writable(buf)?; + size_hint + }; + self.prepare_ws(ws); + Ok(size_hint) + } + + fn write_let_decl( + &mut self, + buf: &mut Buffer, + ws: Ws, + var: &'a Target<'_>, + ) -> Result<(), CompileError> { + self.handle_ws(ws); + self.write_buf_writable(buf)?; + buf.write("let "); + self.visit_target(buf, false, true, var); + buf.writeln(";") + } + + fn is_shadowing_variable(&self, var: &Target<'a>) -> Result { + match var { + Target::Name(name) => { + let name = normalize_identifier(name); + match self.locals.get(&name) { + // declares a new variable + None => Ok(false), + // an initialized variable gets shadowed + Some(meta) if meta.initialized => Ok(true), + // initializes a variable that was introduced in a LetDecl before + _ => Ok(false), + } + } + Target::Tuple(_, targets) => { + for target in targets { + match self.is_shadowing_variable(target) { + Ok(false) => continue, + outcome => return outcome, + } + } + Ok(false) + } + Target::Struct(_, named_targets) => { + for (_, target) in named_targets { + match self.is_shadowing_variable(target) { + Ok(false) => continue, + outcome => return outcome, + } + } + Ok(false) + } + _ => Err("literals are not allowed on the left-hand side of an assignment".into()), + } + } + + fn write_let( + &mut self, + buf: &mut Buffer, + ws: Ws, + var: &'a Target<'_>, + val: &Expr<'_>, + ) -> Result<(), CompileError> { + self.handle_ws(ws); + let mut expr_buf = Buffer::new(0); + self.visit_expr(&mut expr_buf, val)?; + + let shadowed = self.is_shadowing_variable(var)?; + if shadowed { + // Need to flush the buffer if the variable is being shadowed, + // to ensure the old variable is used. + self.write_buf_writable(buf)?; + } + if shadowed + || !matches!(var, &Target::Name(_)) + || matches!(var, Target::Name(name) if self.locals.get(name).is_none()) + { + buf.write("let "); + } + + self.visit_target(buf, true, true, var); + buf.writeln(&format!(" = {};", &expr_buf.buf)) + } + + // If `name` is `Some`, this is a call to a block definition, and we have to find + // the first block for that name from the ancestry chain. If name is `None`, this + // is from a `super()` call, and we can get the name from `self.super_block`. 
+ fn write_block( + &mut self, + buf: &mut Buffer, + name: Option<&'a str>, + outer: Ws, + ) -> Result { + // Flush preceding whitespace according to the outer WS spec + self.flush_ws(outer); + + let prev_block = self.super_block; + let cur = match (name, prev_block) { + // The top-level context contains a block definition + (Some(cur_name), None) => (cur_name, 0), + // A block definition contains a block definition of the same name + (Some(cur_name), Some((prev_name, _))) if cur_name == prev_name => { + return Err(format!("cannot define recursive blocks ({})", cur_name).into()); + } + // A block definition contains a definition of another block + (Some(cur_name), Some((_, _))) => (cur_name, 0), + // `super()` was called inside a block + (None, Some((prev_name, gen))) => (prev_name, gen + 1), + // `super()` is called from outside a block + (None, None) => return Err("cannot call 'super()' outside block".into()), + }; + self.super_block = Some(cur); + + // Get the block definition from the heritage chain + let heritage = self + .heritage + .as_ref() + .ok_or_else(|| CompileError::from("no block ancestors available"))?; + let (ctx, def) = heritage.blocks[cur.0].get(cur.1).ok_or_else(|| { + CompileError::from(match name { + None => format!("no super() block found for block '{}'", cur.0), + Some(name) => format!("no block found for name '{}'", name), + }) + })?; + + // Get the nodes and whitespace suppression data from the block definition + let (ws1, nodes, ws2) = if let Node::BlockDef(ws1, _, nodes, ws2) = def { + (ws1, nodes, ws2) + } else { + unreachable!() + }; + + // Handle inner whitespace suppression spec and process block nodes + self.prepare_ws(*ws1); + self.locals.push(); + let size_hint = self.handle(ctx, nodes, buf, AstLevel::Block)?; + + if !self.locals.is_current_empty() { + // Need to flush the buffer before popping the variable stack + self.write_buf_writable(buf)?; + } + + self.locals.pop(); + self.flush_ws(*ws2); + + // Restore original block context and set whitespace suppression for + // succeeding whitespace according to the outer WS spec + self.super_block = prev_block; + self.prepare_ws(outer); + Ok(size_hint) + } + + fn write_expr(&mut self, ws: Ws, s: &'a Expr<'a>) { + self.handle_ws(ws); + self.buf_writable.push(Writable::Expr(s)); + } + + // Write expression buffer and empty + fn write_buf_writable(&mut self, buf: &mut Buffer) -> Result { + if self.buf_writable.is_empty() { + return Ok(0); + } + + if self + .buf_writable + .iter() + .all(|w| matches!(w, Writable::Lit(_))) + { + let mut buf_lit = Buffer::new(0); + for s in mem::take(&mut self.buf_writable) { + if let Writable::Lit(s) = s { + buf_lit.write(s); + }; + } + buf.writeln(&format!("writer.write_str({:#?})?;", &buf_lit.buf))?; + return Ok(buf_lit.buf.len()); + } + + let mut size_hint = 0; + let mut buf_format = Buffer::new(0); + let mut buf_expr = Buffer::new(buf.indent + 1); + let mut expr_cache = HashMap::with_capacity(self.buf_writable.len()); + for s in mem::take(&mut self.buf_writable) { + match s { + Writable::Lit(s) => { + buf_format.write(&s.replace('{', "{{").replace('}', "}}")); + size_hint += s.len(); + } + Writable::Expr(s) => { + use self::DisplayWrap::*; + let mut expr_buf = Buffer::new(0); + let wrapped = self.visit_expr(&mut expr_buf, s)?; + let expression = match wrapped { + Wrapped => expr_buf.buf, + Unwrapped => format!( + "::askama::MarkupDisplay::new_unsafe(&({}), {})", + expr_buf.buf, self.input.escaper + ), + }; + + use std::collections::hash_map::Entry; + let id = match 
expr_cache.entry(expression.clone()) { + Entry::Occupied(e) => *e.get(), + Entry::Vacant(e) => { + let id = self.named; + self.named += 1; + + buf_expr.write(&format!("expr{} = ", id)); + buf_expr.write("&"); + buf_expr.write(&expression); + buf_expr.writeln(",")?; + + e.insert(id); + id + } + }; + + buf_format.write(&format!("{{expr{}}}", id)); + size_hint += 3; + } + } + } + + buf.writeln("::std::write!(")?; + buf.indent(); + buf.writeln("writer,")?; + buf.writeln(&format!("{:#?},", &buf_format.buf))?; + buf.writeln(buf_expr.buf.trim())?; + buf.dedent()?; + buf.writeln(")?;")?; + Ok(size_hint) + } + + fn visit_lit(&mut self, lws: &'a str, val: &'a str, rws: &'a str) { + assert!(self.next_ws.is_none()); + if !lws.is_empty() { + match self.skip_ws { + WhitespaceHandling::Suppress => { + self.skip_ws = WhitespaceHandling::Preserve; + } + _ if val.is_empty() => { + assert!(rws.is_empty()); + self.next_ws = Some(lws); + } + WhitespaceHandling::Preserve => self.buf_writable.push(Writable::Lit(lws)), + WhitespaceHandling::Minimize => { + self.buf_writable + .push(Writable::Lit(match lws.contains('\n') { + true => "\n", + false => " ", + })) + } + } + } + + if !val.is_empty() { + self.buf_writable.push(Writable::Lit(val)); + } + + if !rws.is_empty() { + self.next_ws = Some(rws); + } + } + + fn write_comment(&mut self, ws: Ws) { + self.handle_ws(ws); + } + + /* Visitor methods for expression types */ + + fn visit_expr_root(&mut self, expr: &Expr<'_>) -> Result { + let mut buf = Buffer::new(0); + self.visit_expr(&mut buf, expr)?; + Ok(buf.buf) + } + + fn visit_expr( + &mut self, + buf: &mut Buffer, + expr: &Expr<'_>, + ) -> Result { + Ok(match *expr { + Expr::BoolLit(s) => self.visit_bool_lit(buf, s), + Expr::NumLit(s) => self.visit_num_lit(buf, s), + Expr::StrLit(s) => self.visit_str_lit(buf, s), + Expr::CharLit(s) => self.visit_char_lit(buf, s), + Expr::Var(s) => self.visit_var(buf, s), + Expr::Path(ref path) => self.visit_path(buf, path), + Expr::Array(ref elements) => self.visit_array(buf, elements)?, + Expr::Attr(ref obj, name) => self.visit_attr(buf, obj, name)?, + Expr::Index(ref obj, ref key) => self.visit_index(buf, obj, key)?, + Expr::Filter(name, ref args) => self.visit_filter(buf, name, args)?, + Expr::Unary(op, ref inner) => self.visit_unary(buf, op, inner)?, + Expr::BinOp(op, ref left, ref right) => self.visit_binop(buf, op, left, right)?, + Expr::Range(op, ref left, ref right) => self.visit_range(buf, op, left, right)?, + Expr::Group(ref inner) => self.visit_group(buf, inner)?, + Expr::Call(ref obj, ref args) => self.visit_call(buf, obj, args)?, + Expr::RustMacro(name, args) => self.visit_rust_macro(buf, name, args), + Expr::Try(ref expr) => self.visit_try(buf, expr.as_ref())?, + Expr::Tuple(ref exprs) => self.visit_tuple(buf, exprs)?, + }) + } + + fn visit_try( + &mut self, + buf: &mut Buffer, + expr: &Expr<'_>, + ) -> Result { + buf.write("::core::result::Result::map_err("); + self.visit_expr(buf, expr)?; + buf.write(", |err| ::askama::shared::Error::Custom(::core::convert::Into::into(err)))?"); + Ok(DisplayWrap::Unwrapped) + } + + fn visit_rust_macro(&mut self, buf: &mut Buffer, name: &str, args: &str) -> DisplayWrap { + buf.write(name); + buf.write("!("); + buf.write(args); + buf.write(")"); + + DisplayWrap::Unwrapped + } + + #[cfg(not(feature = "markdown"))] + fn _visit_markdown_filter( + &mut self, + _buf: &mut Buffer, + _args: &[Expr<'_>], + ) -> Result { + Err("the `markdown` filter requires the `markdown` feature to be enabled".into()) + } + + #[cfg(feature = "markdown")] + 
fn _visit_markdown_filter( + &mut self, + buf: &mut Buffer, + args: &[Expr<'_>], + ) -> Result { + let (md, options) = match args { + [md] => (md, None), + [md, options] => (md, Some(options)), + _ => return Err("markdown filter expects no more than one option argument".into()), + }; + + buf.write(&format!( + "::askama::filters::markdown({}, ", + self.input.escaper + )); + self.visit_expr(buf, md)?; + match options { + Some(options) => { + buf.write(", ::core::option::Option::Some("); + self.visit_expr(buf, options)?; + buf.write(")"); + } + None => buf.write(", ::core::option::Option::None"), + } + buf.write(")?"); + + Ok(DisplayWrap::Wrapped) + } + + fn visit_filter( + &mut self, + buf: &mut Buffer, + mut name: &str, + args: &[Expr<'_>], + ) -> Result { + if matches!(name, "escape" | "e") { + self._visit_escape_filter(buf, args)?; + return Ok(DisplayWrap::Wrapped); + } else if name == "format" { + self._visit_format_filter(buf, args)?; + return Ok(DisplayWrap::Unwrapped); + } else if name == "fmt" { + self._visit_fmt_filter(buf, args)?; + return Ok(DisplayWrap::Unwrapped); + } else if name == "join" { + self._visit_join_filter(buf, args)?; + return Ok(DisplayWrap::Unwrapped); + } else if name == "markdown" { + return self._visit_markdown_filter(buf, args); + } + + if name == "tojson" { + name = "json"; + } + + #[cfg(not(feature = "serde-json"))] + if name == "json" { + return Err("the `json` filter requires the `serde-json` feature to be enabled".into()); + } + #[cfg(not(feature = "serde-yaml"))] + if name == "yaml" { + return Err("the `yaml` filter requires the `serde-yaml` feature to be enabled".into()); + } + + const FILTERS: [&str; 2] = ["safe", "yaml"]; + if FILTERS.contains(&name) { + buf.write(&format!( + "::askama::filters::{}({}, ", + name, self.input.escaper + )); + } else if crate::BUILT_IN_FILTERS.contains(&name) { + buf.write(&format!("::askama::filters::{}(", name)); + } else { + buf.write(&format!("filters::{}(", name)); + } + + self._visit_args(buf, args)?; + buf.write(")?"); + Ok(match FILTERS.contains(&name) { + true => DisplayWrap::Wrapped, + false => DisplayWrap::Unwrapped, + }) + } + + fn _visit_escape_filter( + &mut self, + buf: &mut Buffer, + args: &[Expr<'_>], + ) -> Result<(), CompileError> { + if args.len() > 2 { + return Err("only two arguments allowed to escape filter".into()); + } + let opt_escaper = match args.get(1) { + Some(Expr::StrLit(name)) => Some(*name), + Some(_) => return Err("invalid escaper type for escape filter".into()), + None => None, + }; + let escaper = match opt_escaper { + Some(name) => self + .input + .config + .escapers + .iter() + .find_map(|(escapers, escaper)| escapers.contains(name).then(|| escaper)) + .ok_or_else(|| CompileError::from("invalid escaper for escape filter"))?, + None => self.input.escaper, + }; + buf.write("::askama::filters::escape("); + buf.write(escaper); + buf.write(", "); + self._visit_args(buf, &args[..1])?; + buf.write(")?"); + Ok(()) + } + + fn _visit_format_filter( + &mut self, + buf: &mut Buffer, + args: &[Expr<'_>], + ) -> Result<(), CompileError> { + buf.write("format!("); + if let Some(Expr::StrLit(v)) = args.first() { + self.visit_str_lit(buf, v); + if args.len() > 1 { + buf.write(", "); + } + } else { + return Err("invalid expression type for format filter".into()); + } + self._visit_args(buf, &args[1..])?; + buf.write(")"); + Ok(()) + } + + fn _visit_fmt_filter( + &mut self, + buf: &mut Buffer, + args: &[Expr<'_>], + ) -> Result<(), CompileError> { + buf.write("format!("); + if let Some(Expr::StrLit(v)) 
= args.get(1) { + self.visit_str_lit(buf, v); + buf.write(", "); + } else { + return Err("invalid expression type for fmt filter".into()); + } + self._visit_args(buf, &args[0..1])?; + if args.len() > 2 { + return Err("only two arguments allowed to fmt filter".into()); + } + buf.write(")"); + Ok(()) + } + + // Force type coercion on first argument to `join` filter (see #39). + fn _visit_join_filter( + &mut self, + buf: &mut Buffer, + args: &[Expr<'_>], + ) -> Result<(), CompileError> { + buf.write("::askama::filters::join((&"); + for (i, arg) in args.iter().enumerate() { + if i > 0 { + buf.write(", &"); + } + self.visit_expr(buf, arg)?; + if i == 0 { + buf.write(").into_iter()"); + } + } + buf.write(")?"); + Ok(()) + } + + fn _visit_args(&mut self, buf: &mut Buffer, args: &[Expr<'_>]) -> Result<(), CompileError> { + if args.is_empty() { + return Ok(()); + } + + for (i, arg) in args.iter().enumerate() { + if i > 0 { + buf.write(", "); + } + + let borrow = !arg.is_copyable(); + if borrow { + buf.write("&("); + } + + match arg { + Expr::Call(left, _) if !matches!(left.as_ref(), Expr::Path(_)) => { + buf.writeln("{")?; + self.visit_expr(buf, arg)?; + buf.writeln("}")?; + } + _ => { + self.visit_expr(buf, arg)?; + } + } + + if borrow { + buf.write(")"); + } + } + Ok(()) + } + + fn visit_attr( + &mut self, + buf: &mut Buffer, + obj: &Expr<'_>, + attr: &str, + ) -> Result { + if let Expr::Var(name) = *obj { + if name == "loop" { + if attr == "index" { + buf.write("(_loop_item.index + 1)"); + return Ok(DisplayWrap::Unwrapped); + } else if attr == "index0" { + buf.write("_loop_item.index"); + return Ok(DisplayWrap::Unwrapped); + } else if attr == "first" { + buf.write("_loop_item.first"); + return Ok(DisplayWrap::Unwrapped); + } else if attr == "last" { + buf.write("_loop_item.last"); + return Ok(DisplayWrap::Unwrapped); + } else { + return Err("unknown loop variable".into()); + } + } + } + self.visit_expr(buf, obj)?; + buf.write(&format!(".{}", normalize_identifier(attr))); + Ok(DisplayWrap::Unwrapped) + } + + fn visit_index( + &mut self, + buf: &mut Buffer, + obj: &Expr<'_>, + key: &Expr<'_>, + ) -> Result { + buf.write("&"); + self.visit_expr(buf, obj)?; + buf.write("["); + self.visit_expr(buf, key)?; + buf.write("]"); + Ok(DisplayWrap::Unwrapped) + } + + fn visit_call( + &mut self, + buf: &mut Buffer, + left: &Expr<'_>, + args: &[Expr<'_>], + ) -> Result { + match left { + Expr::Attr(left, method) if **left == Expr::Var("loop") => match *method { + "cycle" => match args { + [arg] => { + if matches!(arg, Expr::Array(arr) if arr.is_empty()) { + return Err("loop.cycle(…) cannot use an empty array".into()); + } + buf.write("({"); + buf.write("let _cycle = &("); + self.visit_expr(buf, arg)?; + buf.writeln(");")?; + buf.writeln("let _len = _cycle.len();")?; + buf.writeln("if _len == 0 {")?; + buf.writeln("return ::core::result::Result::Err(::askama::Error::Fmt(::core::fmt::Error));")?; + buf.writeln("}")?; + buf.writeln("_cycle[_loop_item.index % _len]")?; + buf.writeln("})")?; + } + _ => return Err("loop.cycle(…) expects exactly one argument".into()), + }, + s => return Err(format!("unknown loop method: {:?}", s).into()), + }, + left => { + match left { + Expr::Var(name) => match self.locals.resolve(name) { + Some(resolved) => buf.write(&resolved), + None => buf.write(&format!("(&self.{})", normalize_identifier(name))), + }, + left => { + self.visit_expr(buf, left)?; + } + } + + buf.write("("); + self._visit_args(buf, args)?; + buf.write(")"); + } + } + Ok(DisplayWrap::Unwrapped) + } + + fn 
visit_unary( + &mut self, + buf: &mut Buffer, + op: &str, + inner: &Expr<'_>, + ) -> Result { + buf.write(op); + self.visit_expr(buf, inner)?; + Ok(DisplayWrap::Unwrapped) + } + + fn visit_range( + &mut self, + buf: &mut Buffer, + op: &str, + left: &Option>>, + right: &Option>>, + ) -> Result { + if let Some(left) = left { + self.visit_expr(buf, left)?; + } + buf.write(op); + if let Some(right) = right { + self.visit_expr(buf, right)?; + } + Ok(DisplayWrap::Unwrapped) + } + + fn visit_binop( + &mut self, + buf: &mut Buffer, + op: &str, + left: &Expr<'_>, + right: &Expr<'_>, + ) -> Result { + self.visit_expr(buf, left)?; + buf.write(&format!(" {} ", op)); + self.visit_expr(buf, right)?; + Ok(DisplayWrap::Unwrapped) + } + + fn visit_group( + &mut self, + buf: &mut Buffer, + inner: &Expr<'_>, + ) -> Result { + buf.write("("); + self.visit_expr(buf, inner)?; + buf.write(")"); + Ok(DisplayWrap::Unwrapped) + } + + fn visit_tuple( + &mut self, + buf: &mut Buffer, + exprs: &[Expr<'_>], + ) -> Result { + buf.write("("); + for (index, expr) in exprs.iter().enumerate() { + if index > 0 { + buf.write(" "); + } + self.visit_expr(buf, expr)?; + buf.write(","); + } + buf.write(")"); + Ok(DisplayWrap::Unwrapped) + } + + fn visit_array( + &mut self, + buf: &mut Buffer, + elements: &[Expr<'_>], + ) -> Result { + buf.write("["); + for (i, el) in elements.iter().enumerate() { + if i > 0 { + buf.write(", "); + } + self.visit_expr(buf, el)?; + } + buf.write("]"); + Ok(DisplayWrap::Unwrapped) + } + + fn visit_path(&mut self, buf: &mut Buffer, path: &[&str]) -> DisplayWrap { + for (i, part) in path.iter().enumerate() { + if i > 0 { + buf.write("::"); + } + buf.write(part); + } + DisplayWrap::Unwrapped + } + + fn visit_var(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { + if s == "self" { + buf.write(s); + return DisplayWrap::Unwrapped; + } + + buf.write(normalize_identifier(&self.locals.resolve_or_self(s))); + DisplayWrap::Unwrapped + } + + fn visit_bool_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { + buf.write(s); + DisplayWrap::Unwrapped + } + + fn visit_str_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { + buf.write(&format!("\"{}\"", s)); + DisplayWrap::Unwrapped + } + + fn visit_char_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { + buf.write(&format!("'{}'", s)); + DisplayWrap::Unwrapped + } + + fn visit_num_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { + buf.write(s); + DisplayWrap::Unwrapped + } + + fn visit_target( + &mut self, + buf: &mut Buffer, + initialized: bool, + first_level: bool, + target: &Target<'a>, + ) { + match target { + Target::Name("_") => { + buf.write("_"); + } + Target::Name(name) => { + let name = normalize_identifier(name); + match initialized { + true => self.locals.insert(name, LocalMeta::initialized()), + false => self.locals.insert_with_default(name), + } + buf.write(name); + } + Target::Tuple(path, targets) => { + buf.write(&path.join("::")); + buf.write("("); + for target in targets { + self.visit_target(buf, initialized, false, target); + buf.write(","); + } + buf.write(")"); + } + Target::Struct(path, targets) => { + buf.write(&path.join("::")); + buf.write(" { "); + for (name, target) in targets { + buf.write(normalize_identifier(name)); + buf.write(": "); + self.visit_target(buf, initialized, false, target); + buf.write(","); + } + buf.write(" }"); + } + Target::Path(path) => { + self.visit_path(buf, path); + } + Target::StrLit(s) => { + if first_level { + buf.write("&"); + } + self.visit_str_lit(buf, s); + } + 
Target::NumLit(s) => { + if first_level { + buf.write("&"); + } + self.visit_num_lit(buf, s); + } + Target::CharLit(s) => { + if first_level { + buf.write("&"); + } + self.visit_char_lit(buf, s); + } + Target::BoolLit(s) => { + if first_level { + buf.write("&"); + } + buf.write(s); + } + } + } + + /* Helper methods for dealing with whitespace nodes */ + + // Combines `flush_ws()` and `prepare_ws()` to handle both trailing whitespace from the + // preceding literal and leading whitespace from the succeeding literal. + fn handle_ws(&mut self, ws: Ws) { + self.flush_ws(ws); + self.prepare_ws(ws); + } + + fn should_trim_ws(&self, ws: Option) -> WhitespaceHandling { + match ws { + Some(Whitespace::Suppress) => WhitespaceHandling::Suppress, + Some(Whitespace::Preserve) => WhitespaceHandling::Preserve, + Some(Whitespace::Minimize) => WhitespaceHandling::Minimize, + None => self.whitespace, + } + } + + // If the previous literal left some trailing whitespace in `next_ws` and the + // prefix whitespace suppressor from the given argument, flush that whitespace. + // In either case, `next_ws` is reset to `None` (no trailing whitespace). + fn flush_ws(&mut self, ws: Ws) { + if self.next_ws.is_none() { + return; + } + + // If `whitespace` is set to `suppress`, we keep the whitespace characters only if there is + // a `+` character. + match self.should_trim_ws(ws.0) { + WhitespaceHandling::Preserve => { + let val = self.next_ws.unwrap(); + if !val.is_empty() { + self.buf_writable.push(Writable::Lit(val)); + } + } + WhitespaceHandling::Minimize => { + let val = self.next_ws.unwrap(); + if !val.is_empty() { + self.buf_writable + .push(Writable::Lit(match val.contains('\n') { + true => "\n", + false => " ", + })); + } + } + WhitespaceHandling::Suppress => {} + } + self.next_ws = None; + } + + // Sets `skip_ws` to match the suffix whitespace suppressor from the given + // argument, to determine whether to suppress leading whitespace from the + // next literal. 
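// For reference, the `Minimize` arms in `visit_lit` and `flush_ws` above both
// collapse a whitespace run to a single newline if the run spanned lines and
// to a single space otherwise. A standalone sketch of that rule (the helper
// name is illustrative and does not exist in the generator):
//
//     fn minimize_ws(run: &str) -> &'static str {
//         if run.contains('\n') { "\n" } else { " " }
//     }
//
//     assert_eq!(minimize_ws("  \t "), " ");
//     assert_eq!(minimize_ws(" \t\n "), "\n");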
+ fn prepare_ws(&mut self, ws: Ws) { + self.skip_ws = self.should_trim_ws(ws.1); + } +} + +struct Buffer { + // The buffer to generate the code into + buf: String, + // The current level of indentation (in spaces) + indent: u8, + // Whether the output buffer is currently at the start of a line + start: bool, +} + +impl Buffer { + fn new(indent: u8) -> Self { + Self { + buf: String::new(), + indent, + start: true, + } + } + + fn writeln(&mut self, s: &str) -> Result<(), CompileError> { + if s == "}" { + self.dedent()?; + } + if !s.is_empty() { + self.write(s); + } + self.buf.push('\n'); + if s.ends_with('{') { + self.indent(); + } + self.start = true; + Ok(()) + } + + fn write(&mut self, s: &str) { + if self.start { + for _ in 0..(self.indent * 4) { + self.buf.push(' '); + } + self.start = false; + } + self.buf.push_str(s); + } + + fn indent(&mut self) { + self.indent += 1; + } + + fn dedent(&mut self) -> Result<(), CompileError> { + if self.indent == 0 { + return Err("dedent() called while indentation == 0".into()); + } + self.indent -= 1; + Ok(()) + } +} + +#[derive(Clone, Default)] +struct LocalMeta { + refs: Option, + initialized: bool, +} + +impl LocalMeta { + fn initialized() -> Self { + Self { + refs: None, + initialized: true, + } + } + + fn with_ref(refs: String) -> Self { + Self { + refs: Some(refs), + initialized: true, + } + } +} + +// type SetChain<'a, T> = MapChain<'a, T, ()>; + +#[derive(Debug)] +struct MapChain<'a, K, V> +where + K: cmp::Eq + hash::Hash, +{ + parent: Option<&'a MapChain<'a, K, V>>, + scopes: Vec>, +} + +impl<'a, K: 'a, V: 'a> MapChain<'a, K, V> +where + K: cmp::Eq + hash::Hash, +{ + fn new() -> MapChain<'a, K, V> { + MapChain { + parent: None, + scopes: vec![HashMap::new()], + } + } + + fn with_parent<'p>(parent: &'p MapChain<'_, K, V>) -> MapChain<'p, K, V> { + MapChain { + parent: Some(parent), + scopes: vec![HashMap::new()], + } + } + + /// Iterates the scopes in reverse and returns `Some(LocalMeta)` + /// from the first scope where `key` exists. + fn get(&self, key: &K) -> Option<&V> { + let scopes = self.scopes.iter().rev(); + scopes + .filter_map(|set| set.get(key)) + .next() + .or_else(|| self.parent.and_then(|set| set.get(key))) + } + + fn is_current_empty(&self) -> bool { + self.scopes.last().unwrap().is_empty() + } + + fn insert(&mut self, key: K, val: V) { + self.scopes.last_mut().unwrap().insert(key, val); + + // Note that if `insert` returns `Some` then it implies + // an identifier is reused. For e.g. `{% macro f(a, a) %}` + // and `{% let (a, a) = ... %}` then this results in a + // generated template, which when compiled fails with the + // compile error "identifier `a` used more than once". 
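// To make that note concrete: the generator emits the reused name as-is, so
// `{% let (a, a) = ... %}` produces a Rust pattern along the lines of
//
//     let (a, a) = /* source expression */;
//
// and it is rustc, not this crate, that rejects the duplicate binding.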
+ } + + fn insert_with_default(&mut self, key: K) + where + V: Default, + { + self.insert(key, V::default()); + } + + fn push(&mut self) { + self.scopes.push(HashMap::new()); + } + + fn pop(&mut self) { + self.scopes.pop().unwrap(); + assert!(!self.scopes.is_empty()); + } +} + +impl MapChain<'_, &str, LocalMeta> { + fn resolve(&self, name: &str) -> Option { + let name = normalize_identifier(name); + self.get(&name).map(|meta| match &meta.refs { + Some(expr) => expr.clone(), + None => name.to_string(), + }) + } + + fn resolve_or_self(&self, name: &str) -> String { + let name = normalize_identifier(name); + self.resolve(name) + .unwrap_or_else(|| format!("self.{}", name)) + } +} + +fn median(sizes: &mut [usize]) -> usize { + sizes.sort_unstable(); + if sizes.len() % 2 == 1 { + sizes[sizes.len() / 2] + } else { + (sizes[sizes.len() / 2 - 1] + sizes[sizes.len() / 2]) / 2 + } +} + +#[derive(Clone, PartialEq)] +enum AstLevel { + Top, + Block, + Nested, +} + +impl Copy for AstLevel {} + +#[derive(Clone)] +enum DisplayWrap { + Wrapped, + Unwrapped, +} + +impl Copy for DisplayWrap {} + +#[derive(Debug)] +enum Writable<'a> { + Lit(&'a str), + Expr(&'a Expr<'a>), +} + +// Identifiers to be replaced with raw identifiers, so as to avoid +// collisions between template syntax and Rust's syntax. In particular +// [Rust keywords](https://doc.rust-lang.org/reference/keywords.html) +// should be replaced, since they're not reserved words in Askama +// syntax but have a high probability of causing problems in the +// generated code. +// +// This list excludes the Rust keywords *self*, *Self*, and *super* +// because they are not allowed to be raw identifiers, and *loop* +// because it's used something like a keyword in the template +// language. +static USE_RAW: [(&str, &str); 47] = [ + ("as", "r#as"), + ("break", "r#break"), + ("const", "r#const"), + ("continue", "r#continue"), + ("crate", "r#crate"), + ("else", "r#else"), + ("enum", "r#enum"), + ("extern", "r#extern"), + ("false", "r#false"), + ("fn", "r#fn"), + ("for", "r#for"), + ("if", "r#if"), + ("impl", "r#impl"), + ("in", "r#in"), + ("let", "r#let"), + ("match", "r#match"), + ("mod", "r#mod"), + ("move", "r#move"), + ("mut", "r#mut"), + ("pub", "r#pub"), + ("ref", "r#ref"), + ("return", "r#return"), + ("static", "r#static"), + ("struct", "r#struct"), + ("trait", "r#trait"), + ("true", "r#true"), + ("type", "r#type"), + ("unsafe", "r#unsafe"), + ("use", "r#use"), + ("where", "r#where"), + ("while", "r#while"), + ("async", "r#async"), + ("await", "r#await"), + ("dyn", "r#dyn"), + ("abstract", "r#abstract"), + ("become", "r#become"), + ("box", "r#box"), + ("do", "r#do"), + ("final", "r#final"), + ("macro", "r#macro"), + ("override", "r#override"), + ("priv", "r#priv"), + ("typeof", "r#typeof"), + ("unsized", "r#unsized"), + ("virtual", "r#virtual"), + ("yield", "r#yield"), + ("try", "r#try"), +]; + +fn normalize_identifier(ident: &str) -> &str { + if let Some(word) = USE_RAW.iter().find(|x| x.0 == ident) { + word.1 + } else { + ident + } +} diff --git a/askama_derive/src/heritage.rs b/askama_derive/src/heritage.rs new file mode 100644 index 0000000..52c14a2 --- /dev/null +++ b/askama_derive/src/heritage.rs @@ -0,0 +1,126 @@ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use crate::config::Config; +use crate::parser::{Expr, Loop, Macro, Node}; +use crate::CompileError; + +pub(crate) struct Heritage<'a> { + pub(crate) root: &'a Context<'a>, + pub(crate) blocks: BlockAncestry<'a>, +} + +impl Heritage<'_> { + pub(crate) fn new<'n, S: 
std::hash::BuildHasher>( + mut ctx: &'n Context<'n>, + contexts: &'n HashMap<&'n Path, Context<'n>, S>, + ) -> Heritage<'n> { + let mut blocks: BlockAncestry<'n> = ctx + .blocks + .iter() + .map(|(name, def)| (*name, vec![(ctx, *def)])) + .collect(); + + while let Some(ref path) = ctx.extends { + ctx = &contexts[path.as_path()]; + for (name, def) in &ctx.blocks { + blocks.entry(name).or_insert_with(Vec::new).push((ctx, def)); + } + } + + Heritage { root: ctx, blocks } + } +} + +type BlockAncestry<'a> = HashMap<&'a str, Vec<(&'a Context<'a>, &'a Node<'a>)>>; + +pub(crate) struct Context<'a> { + pub(crate) nodes: &'a [Node<'a>], + pub(crate) extends: Option, + pub(crate) blocks: HashMap<&'a str, &'a Node<'a>>, + pub(crate) macros: HashMap<&'a str, &'a Macro<'a>>, + pub(crate) imports: HashMap<&'a str, PathBuf>, +} + +impl Context<'_> { + pub(crate) fn new<'n>( + config: &Config<'_>, + path: &Path, + nodes: &'n [Node<'n>], + ) -> Result, CompileError> { + let mut extends = None; + let mut blocks = Vec::new(); + let mut macros = HashMap::new(); + let mut imports = HashMap::new(); + let mut nested = vec![nodes]; + let mut top = true; + + while let Some(nodes) = nested.pop() { + for n in nodes { + match n { + Node::Extends(Expr::StrLit(extends_path)) if top => match extends { + Some(_) => return Err("multiple extend blocks found".into()), + None => { + extends = Some(config.find_template(extends_path, Some(path))?); + } + }, + Node::Macro(name, m) if top => { + macros.insert(*name, m); + } + Node::Import(_, import_path, scope) if top => { + let path = config.find_template(import_path, Some(path))?; + imports.insert(*scope, path); + } + Node::Extends(_) | Node::Macro(_, _) | Node::Import(_, _, _) if !top => { + return Err( + "extends, macro or import blocks not allowed below top level".into(), + ); + } + def @ Node::BlockDef(_, _, _, _) => { + blocks.push(def); + if let Node::BlockDef(_, _, nodes, _) = def { + nested.push(nodes); + } + } + Node::Cond(branches, _) => { + for (_, _, nodes) in branches { + nested.push(nodes); + } + } + Node::Loop(Loop { + body, else_block, .. + }) => { + nested.push(body); + nested.push(else_block); + } + Node::Match(_, _, arms, _) => { + for (_, _, arm) in arms { + nested.push(arm); + } + } + _ => {} + } + } + top = false; + } + + let blocks: HashMap<_, _> = blocks + .iter() + .map(|def| { + if let Node::BlockDef(_, name, _, _) = def { + (*name, *def) + } else { + unreachable!() + } + }) + .collect(); + + Ok(Context { + nodes, + extends, + blocks, + macros, + imports, + }) + } +} diff --git a/askama_derive/src/input.rs b/askama_derive/src/input.rs new file mode 100644 index 0000000..c09f3d0 --- /dev/null +++ b/askama_derive/src/input.rs @@ -0,0 +1,255 @@ +use crate::config::{Config, Syntax}; +use crate::generator::TemplateArgs; +use crate::CompileError; + +use std::path::{Path, PathBuf}; +use std::str::FromStr; + +use mime::Mime; + +pub(crate) struct TemplateInput<'a> { + pub(crate) ast: &'a syn::DeriveInput, + pub(crate) config: &'a Config<'a>, + pub(crate) syntax: &'a Syntax<'a>, + pub(crate) source: Source, + pub(crate) print: Print, + pub(crate) escaper: &'a str, + pub(crate) ext: Option, + pub(crate) mime_type: String, + pub(crate) parent: Option<&'a syn::Type>, + pub(crate) path: PathBuf, +} + +impl TemplateInput<'_> { + /// Extract the template metadata from the `DeriveInput` structure. This + /// mostly recovers the data for the `TemplateInput` fields from the + /// `template()` attribute list fields; it also finds the of the `_parent` + /// field, if any. 
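For orientation, the attribute list referred to here is the user-facing `#[template(...)]` on the deriving type. A typical invocation, written as user code rather than anything in this patch (the struct and field names are arbitrary), looks like this; note that `ext` is required whenever `source` is used in place of `path`, matching the validation below:

    use askama::Template;

    #[derive(Template)]
    #[template(source = "Hello, {{ name }}!", ext = "txt")]
    struct HelloTemplate<'a> {
        name: &'a str,
    }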
+ pub(crate) fn new<'n>( + ast: &'n syn::DeriveInput, + config: &'n Config<'_>, + args: TemplateArgs, + ) -> Result, CompileError> { + let TemplateArgs { + source, + print, + escaping, + ext, + syntax, + .. + } = args; + + // Validate the `source` and `ext` value together, since they are + // related. In case `source` was used instead of `path`, the value + // of `ext` is merged into a synthetic `path` value here. + let source = source.expect("template path or source not found in attributes"); + let path = match (&source, &ext) { + (&Source::Path(ref path), _) => config.find_template(path, None)?, + (&Source::Source(_), Some(ext)) => PathBuf::from(format!("{}.{}", ast.ident, ext)), + (&Source::Source(_), None) => { + return Err("must include 'ext' attribute when using 'source' attribute".into()) + } + }; + + // Check to see if a `_parent` field was defined on the context + // struct, and store the type for it for use in the code generator. + let parent = match ast.data { + syn::Data::Struct(syn::DataStruct { + fields: syn::Fields::Named(ref fields), + .. + }) => fields + .named + .iter() + .find(|f| f.ident.as_ref().filter(|name| *name == "_parent").is_some()) + .map(|f| &f.ty), + _ => None, + }; + + if parent.is_some() { + eprint!( + " --> in struct {}\n = use of deprecated field '_parent'\n", + ast.ident + ); + } + + // Validate syntax + let syntax = syntax.map_or_else( + || Ok(config.syntaxes.get(config.default_syntax).unwrap()), + |s| { + config + .syntaxes + .get(&s) + .ok_or_else(|| CompileError::from(format!("attribute syntax {} not exist", s))) + }, + )?; + + // Match extension against defined output formats + + let escaping = escaping.unwrap_or_else(|| { + path.extension() + .map(|s| s.to_str().unwrap()) + .unwrap_or("") + .to_string() + }); + + let mut escaper = None; + for (extensions, path) in &config.escapers { + if extensions.contains(&escaping) { + escaper = Some(path); + break; + } + } + + let escaper = escaper.ok_or_else(|| { + CompileError::from(format!("no escaper defined for extension '{}'", escaping)) + })?; + + let mime_type = + extension_to_mime_type(ext_default_to_path(ext.as_deref(), &path).unwrap_or("txt")) + .to_string(); + + Ok(TemplateInput { + ast, + config, + syntax, + source, + print, + escaper, + ext, + mime_type, + parent, + path, + }) + } + + #[inline] + pub(crate) fn extension(&self) -> Option<&str> { + ext_default_to_path(self.ext.as_deref(), &self.path) + } +} + +#[inline] +fn ext_default_to_path<'a>(ext: Option<&'a str>, path: &'a Path) -> Option<&'a str> { + ext.or_else(|| extension(path)) +} + +fn extension(path: &Path) -> Option<&str> { + let ext = path.extension().map(|s| s.to_str().unwrap())?; + + const JINJA_EXTENSIONS: [&str; 3] = ["j2", "jinja", "jinja2"]; + if JINJA_EXTENSIONS.contains(&ext) { + Path::new(path.file_stem().unwrap()) + .extension() + .map(|s| s.to_str().unwrap()) + .or(Some(ext)) + } else { + Some(ext) + } +} + +pub(crate) enum Source { + Path(String), + Source(String), +} + +#[derive(PartialEq)] +pub(crate) enum Print { + All, + Ast, + Code, + None, +} + +impl FromStr for Print { + type Err = CompileError; + + fn from_str(s: &str) -> Result { + use self::Print::*; + Ok(match s { + "all" => All, + "ast" => Ast, + "code" => Code, + "none" => None, + v => return Err(format!("invalid value for print option: {}", v,).into()), + }) + } +} + +impl Default for Print { + fn default() -> Self { + Self::None + } +} + +pub(crate) fn extension_to_mime_type(ext: &str) -> Mime { + let basic_type = 
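// Ask `mime_guess` for the extension's basic media type (unknown extensions
// fall back to `application/octet-stream`), then upgrade the plain-text
// types listed in `TEXT_TYPES` below to their UTF-8 variants, e.g. `html`
// ends up as `text/html; charset=utf-8`.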
mime_guess::from_ext(ext).first_or_octet_stream(); + for (simple, utf_8) in &TEXT_TYPES { + if &basic_type == simple { + return utf_8.clone(); + } + } + basic_type +} + +const TEXT_TYPES: [(Mime, Mime); 6] = [ + (mime::TEXT_PLAIN, mime::TEXT_PLAIN_UTF_8), + (mime::TEXT_HTML, mime::TEXT_HTML_UTF_8), + (mime::TEXT_CSS, mime::TEXT_CSS_UTF_8), + (mime::TEXT_CSV, mime::TEXT_CSV_UTF_8), + ( + mime::TEXT_TAB_SEPARATED_VALUES, + mime::TEXT_TAB_SEPARATED_VALUES_UTF_8, + ), + ( + mime::APPLICATION_JAVASCRIPT, + mime::APPLICATION_JAVASCRIPT_UTF_8, + ), +]; + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ext() { + assert_eq!(extension(Path::new("foo-bar.txt")), Some("txt")); + assert_eq!(extension(Path::new("foo-bar.html")), Some("html")); + assert_eq!(extension(Path::new("foo-bar.unknown")), Some("unknown")); + + assert_eq!(extension(Path::new("foo/bar/baz.txt")), Some("txt")); + assert_eq!(extension(Path::new("foo/bar/baz.html")), Some("html")); + assert_eq!(extension(Path::new("foo/bar/baz.unknown")), Some("unknown")); + } + + #[test] + fn test_double_ext() { + assert_eq!(extension(Path::new("foo-bar.html.txt")), Some("txt")); + assert_eq!(extension(Path::new("foo-bar.txt.html")), Some("html")); + assert_eq!(extension(Path::new("foo-bar.txt.unknown")), Some("unknown")); + + assert_eq!(extension(Path::new("foo/bar/baz.html.txt")), Some("txt")); + assert_eq!(extension(Path::new("foo/bar/baz.txt.html")), Some("html")); + assert_eq!( + extension(Path::new("foo/bar/baz.txt.unknown")), + Some("unknown") + ); + } + + #[test] + fn test_skip_jinja_ext() { + assert_eq!(extension(Path::new("foo-bar.html.j2")), Some("html")); + assert_eq!(extension(Path::new("foo-bar.html.jinja")), Some("html")); + assert_eq!(extension(Path::new("foo-bar.html.jinja2")), Some("html")); + + assert_eq!(extension(Path::new("foo/bar/baz.txt.j2")), Some("txt")); + assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja")), Some("txt")); + assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja2")), Some("txt")); + } + + #[test] + fn test_only_jinja_ext() { + assert_eq!(extension(Path::new("foo-bar.j2")), Some("j2")); + assert_eq!(extension(Path::new("foo-bar.jinja")), Some("jinja")); + assert_eq!(extension(Path::new("foo-bar.jinja2")), Some("jinja2")); + } +} diff --git a/askama_derive/src/lib.rs b/askama_derive/src/lib.rs index b41fa6a..2acf583 100644 --- a/askama_derive/src/lib.rs +++ b/askama_derive/src/lib.rs @@ -2,9 +2,99 @@ #![deny(elided_lifetimes_in_paths)] #![deny(unreachable_pub)] +use std::borrow::Cow; +use std::fmt; + use proc_macro::TokenStream; +use proc_macro2::Span; + +mod config; +mod generator; +mod heritage; +mod input; +mod parser; #[proc_macro_derive(Template, attributes(template))] pub fn derive_template(input: TokenStream) -> TokenStream { - askama_shared::derive_template(input.into()).into() + generator::derive_template(input) +} + +#[derive(Debug, Clone)] +struct CompileError { + msg: Cow<'static, str>, + span: Span, } + +impl CompileError { + fn new>>(s: S, span: Span) -> Self { + Self { + msg: s.into(), + span, + } + } + + fn into_compile_error(self) -> TokenStream { + syn::Error::new(self.span, self.msg) + .to_compile_error() + .into() + } +} + +impl std::error::Error for CompileError {} + +impl fmt::Display for CompileError { + #[inline] + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.write_str(&self.msg) + } +} + +impl From<&'static str> for CompileError { + #[inline] + fn from(s: &'static str) -> Self { + Self::new(s, Span::call_site()) + } +} + +impl From for 
CompileError { + #[inline] + fn from(s: String) -> Self { + Self::new(s, Span::call_site()) + } +} + +// This is used by the code generator to decide whether a named filter is part of +// Askama or should refer to a local `filters` module. It should contain all the +// filters shipped with Askama, even the optional ones (since optional inclusion +// in the const vector based on features seems impossible right now). +const BUILT_IN_FILTERS: &[&str] = &[ + "abs", + "capitalize", + "center", + "e", + "escape", + "filesizeformat", + "fmt", + "format", + "indent", + "into_f64", + "into_isize", + "join", + "linebreaks", + "linebreaksbr", + "paragraphbreaks", + "lower", + "lowercase", + "safe", + "trim", + "truncate", + "upper", + "uppercase", + "urlencode", + "urlencode_strict", + "wordcount", + // optional features, reserve the names anyway: + "json", + "markdown", + "yaml", +]; diff --git a/askama_derive/src/parser.rs b/askama_derive/src/parser.rs new file mode 100644 index 0000000..efcad73 --- /dev/null +++ b/askama_derive/src/parser.rs @@ -0,0 +1,1885 @@ +use std::cell::Cell; +use std::str; + +use nom::branch::alt; +use nom::bytes::complete::{escaped, is_not, tag, take_till, take_until}; +use nom::character::complete::{anychar, char, digit1}; +use nom::combinator::{complete, consumed, cut, eof, map, not, opt, peek, recognize, value}; +use nom::error::{Error, ErrorKind}; +use nom::multi::{fold_many0, many0, many1, separated_list0, separated_list1}; +use nom::sequence::{delimited, pair, preceded, terminated, tuple}; +use nom::{self, error_position, AsChar, IResult, InputTakeAtPosition}; + +use crate::config::Syntax; +use crate::CompileError; + +#[derive(Debug, PartialEq)] +pub(crate) enum Node<'a> { + Lit(&'a str, &'a str, &'a str), + Comment(Ws), + Expr(Ws, Expr<'a>), + Call(Ws, Option<&'a str>, &'a str, Vec>), + LetDecl(Ws, Target<'a>), + Let(Ws, Target<'a>, Expr<'a>), + Cond(Vec>, Ws), + Match(Ws, Expr<'a>, Vec>, Ws), + Loop(Loop<'a>), + Extends(Expr<'a>), + BlockDef(Ws, &'a str, Vec>, Ws), + Include(Ws, &'a str), + Import(Ws, &'a str, &'a str), + Macro(&'a str, Macro<'a>), + Raw(Ws, &'a str, &'a str, &'a str, Ws), + Break(Ws), + Continue(Ws), +} + +#[derive(Debug, PartialEq)] +pub(crate) struct Loop<'a> { + pub(crate) ws1: Ws, + pub(crate) var: Target<'a>, + pub(crate) iter: Expr<'a>, + pub(crate) cond: Option>, + pub(crate) body: Vec>, + pub(crate) ws2: Ws, + pub(crate) else_block: Vec>, + pub(crate) ws3: Ws, +} + +#[derive(Debug, PartialEq)] +pub(crate) enum Expr<'a> { + BoolLit(&'a str), + NumLit(&'a str), + StrLit(&'a str), + CharLit(&'a str), + Var(&'a str), + Path(Vec<&'a str>), + Array(Vec>), + Attr(Box>, &'a str), + Index(Box>, Box>), + Filter(&'a str, Vec>), + Unary(&'a str, Box>), + BinOp(&'a str, Box>, Box>), + Range(&'a str, Option>>, Option>>), + Group(Box>), + Tuple(Vec>), + Call(Box>, Vec>), + RustMacro(&'a str, &'a str), + Try(Box>), +} + +impl Expr<'_> { + /// Returns `true` if enough assumptions can be made, + /// to determine that `self` is copyable. + pub(crate) fn is_copyable(&self) -> bool { + self.is_copyable_within_op(false) + } + + fn is_copyable_within_op(&self, within_op: bool) -> bool { + use Expr::*; + match self { + BoolLit(_) | NumLit(_) | StrLit(_) | CharLit(_) => true, + Unary(.., expr) => expr.is_copyable_within_op(true), + BinOp(_, lhs, rhs) => { + lhs.is_copyable_within_op(true) && rhs.is_copyable_within_op(true) + } + Range(..) 
=> true, + // The result of a call likely doesn't need to be borrowed, + // as in that case the call is more likely to return a + // reference in the first place then. + Call(..) | Path(..) => true, + // If the `expr` is within a `Unary` or `BinOp` then + // an assumption can be made that the operand is copy. + // If not, then the value is moved and adding `.clone()` + // will solve that issue. However, if the operand is + // implicitly borrowed, then it's likely not even possible + // to get the template to compile. + _ => within_op && self.is_attr_self(), + } + } + + /// Returns `true` if this is an `Attr` where the `obj` is `"self"`. + pub(crate) fn is_attr_self(&self) -> bool { + match self { + Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Var("self")) => true, + Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Attr(..)) => obj.is_attr_self(), + _ => false, + } + } +} + +pub(crate) type When<'a> = (Ws, Target<'a>, Vec>); + +#[derive(Debug, PartialEq)] +pub(crate) struct Macro<'a> { + pub(crate) ws1: Ws, + pub(crate) args: Vec<&'a str>, + pub(crate) nodes: Vec>, + pub(crate) ws2: Ws, +} + +#[derive(Debug, PartialEq)] +pub(crate) enum Target<'a> { + Name(&'a str), + Tuple(Vec<&'a str>, Vec>), + Struct(Vec<&'a str>, Vec<(&'a str, Target<'a>)>), + NumLit(&'a str), + StrLit(&'a str), + CharLit(&'a str), + BoolLit(&'a str), + Path(Vec<&'a str>), +} + +#[derive(Clone, Copy, Debug, PartialEq)] +pub(crate) enum Whitespace { + Preserve, + Suppress, + Minimize, +} + +impl From for Whitespace { + fn from(c: char) -> Self { + match c { + '+' => Self::Preserve, + '-' => Self::Suppress, + '~' => Self::Minimize, + _ => panic!("unsupported `Whitespace` conversion"), + } + } +} + +/// First field is "minus/plus sign was used on the left part of the item". +/// +/// Second field is "minus/plus sign was used on the right part of the item". +#[derive(Clone, Copy, Debug, PartialEq)] +pub(crate) struct Ws(pub(crate) Option, pub(crate) Option); + +pub(crate) type Cond<'a> = (Ws, Option>, Vec>); + +#[derive(Debug, PartialEq)] +pub(crate) struct CondTest<'a> { + pub(crate) target: Option>, + pub(crate) expr: Expr<'a>, +} + +fn is_ws(c: char) -> bool { + matches!(c, ' ' | '\t' | '\r' | '\n') +} + +fn not_ws(c: char) -> bool { + !is_ws(c) +} + +fn ws<'a, O>( + inner: impl FnMut(&'a str) -> IResult<&'a str, O>, +) -> impl FnMut(&'a str) -> IResult<&'a str, O> { + delimited(take_till(not_ws), inner, take_till(not_ws)) +} + +fn split_ws_parts(s: &str) -> Node<'_> { + let trimmed_start = s.trim_start_matches(is_ws); + let len_start = s.len() - trimmed_start.len(); + let trimmed = trimmed_start.trim_end_matches(is_ws); + Node::Lit(&s[..len_start], trimmed, &trimmed_start[trimmed.len()..]) +} + +/// Skips input until `end` was found, but does not consume it. +/// Returns tuple that would be returned when parsing `end`. 
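The `Ws` convention above is easiest to see on a concrete parse. In the style of the tests at the end of this file, a `-` on the left of an expression tag and a `+` on the right come back as the two fields of `Ws` (assuming the default syntax; this assertion is an illustration, not one of the existing tests):

    assert_eq!(
        super::parse("{{- foo +}}", &Syntax::default()).unwrap(),
        vec![Node::Expr(
            Ws(Some(Whitespace::Suppress), Some(Whitespace::Preserve)),
            Expr::Var("foo"),
        )],
    );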
+fn skip_till<'a, O>( + end: impl FnMut(&'a str) -> IResult<&'a str, O>, +) -> impl FnMut(&'a str) -> IResult<&'a str, (&'a str, O)> { + enum Next { + IsEnd(O), + NotEnd(char), + } + let mut next = alt((map(end, Next::IsEnd), map(anychar, Next::NotEnd))); + move |start: &'a str| { + let mut i = start; + loop { + let (j, is_end) = next(i)?; + match is_end { + Next::IsEnd(lookahead) => return Ok((i, (j, lookahead))), + Next::NotEnd(_) => i = j, + } + } + } +} + +struct State<'a> { + syntax: &'a Syntax<'a>, + loop_depth: Cell, +} + +fn take_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let p_start = alt(( + tag(s.syntax.block_start), + tag(s.syntax.comment_start), + tag(s.syntax.expr_start), + )); + + let (i, _) = not(eof)(i)?; + let (i, content) = opt(recognize(skip_till(p_start)))(i)?; + let (i, content) = match content { + Some("") => { + // {block,comment,expr}_start follows immediately. + return Err(nom::Err::Error(error_position!(i, ErrorKind::TakeUntil))); + } + Some(content) => (i, content), + None => ("", i), // there is no {block,comment,expr}_start: take everything + }; + Ok((i, split_ws_parts(content))) +} + +fn identifier(input: &str) -> IResult<&str, &str> { + recognize(pair(identifier_start, opt(identifier_tail)))(input) +} + +fn identifier_start(s: &str) -> IResult<&str, &str> { + s.split_at_position1_complete( + |c| !(c.is_alpha() || c == '_' || c >= '\u{0080}'), + nom::error::ErrorKind::Alpha, + ) +} + +fn identifier_tail(s: &str) -> IResult<&str, &str> { + s.split_at_position1_complete( + |c| !(c.is_alphanum() || c == '_' || c >= '\u{0080}'), + nom::error::ErrorKind::Alpha, + ) +} + +fn bool_lit(i: &str) -> IResult<&str, &str> { + alt((tag("false"), tag("true")))(i) +} + +fn expr_bool_lit(i: &str) -> IResult<&str, Expr<'_>> { + map(bool_lit, Expr::BoolLit)(i) +} + +fn variant_bool_lit(i: &str) -> IResult<&str, Target<'_>> { + map(bool_lit, Target::BoolLit)(i) +} + +fn num_lit(i: &str) -> IResult<&str, &str> { + recognize(pair(digit1, opt(pair(char('.'), digit1))))(i) +} + +fn expr_num_lit(i: &str) -> IResult<&str, Expr<'_>> { + map(num_lit, Expr::NumLit)(i) +} + +fn expr_array_lit(i: &str) -> IResult<&str, Expr<'_>> { + delimited( + ws(char('[')), + map(separated_list1(ws(char(',')), expr_any), Expr::Array), + ws(char(']')), + )(i) +} + +fn variant_num_lit(i: &str) -> IResult<&str, Target<'_>> { + map(num_lit, Target::NumLit)(i) +} + +fn str_lit(i: &str) -> IResult<&str, &str> { + let (i, s) = delimited( + char('"'), + opt(escaped(is_not("\\\""), '\\', anychar)), + char('"'), + )(i)?; + Ok((i, s.unwrap_or_default())) +} + +fn expr_str_lit(i: &str) -> IResult<&str, Expr<'_>> { + map(str_lit, Expr::StrLit)(i) +} + +fn variant_str_lit(i: &str) -> IResult<&str, Target<'_>> { + map(str_lit, Target::StrLit)(i) +} + +fn char_lit(i: &str) -> IResult<&str, &str> { + let (i, s) = delimited( + char('\''), + opt(escaped(is_not("\\\'"), '\\', anychar)), + char('\''), + )(i)?; + Ok((i, s.unwrap_or_default())) +} + +fn expr_char_lit(i: &str) -> IResult<&str, Expr<'_>> { + map(char_lit, Expr::CharLit)(i) +} + +fn variant_char_lit(i: &str) -> IResult<&str, Target<'_>> { + map(char_lit, Target::CharLit)(i) +} + +fn expr_var(i: &str) -> IResult<&str, Expr<'_>> { + map(identifier, Expr::Var)(i) +} + +fn path(i: &str) -> IResult<&str, Vec<&str>> { + let root = opt(value("", ws(tag("::")))); + let tail = separated_list1(ws(tag("::")), identifier); + + match tuple((root, identifier, ws(tag("::")), tail))(i) { + Ok((i, (root, start, _, rest))) => { + let mut path = 
Vec::new(); + path.extend(root); + path.push(start); + path.extend(rest); + Ok((i, path)) + } + Err(err) => { + if let Ok((i, name)) = identifier(i) { + // The returned identifier can be assumed to be path if: + // - Contains both a lowercase and uppercase character, i.e. a type name like `None` + // - Doesn't contain any lowercase characters, i.e. it's a constant + // In short, if it contains any uppercase characters it's a path. + if name.contains(char::is_uppercase) { + return Ok((i, vec![name])); + } + } + + // If `identifier()` fails then just return the original error + Err(err) + } + } +} + +fn expr_path(i: &str) -> IResult<&str, Expr<'_>> { + let (i, path) = path(i)?; + Ok((i, Expr::Path(path))) +} + +fn named_target(i: &str) -> IResult<&str, (&str, Target<'_>)> { + let (i, (src, target)) = pair(identifier, opt(preceded(ws(char(':')), target)))(i)?; + Ok((i, (src, target.unwrap_or(Target::Name(src))))) +} + +fn variant_lit(i: &str) -> IResult<&str, Target<'_>> { + alt(( + variant_str_lit, + variant_char_lit, + variant_num_lit, + variant_bool_lit, + ))(i) +} + +fn target(i: &str) -> IResult<&str, Target<'_>> { + let mut opt_opening_paren = map(opt(ws(char('('))), |o| o.is_some()); + let mut opt_closing_paren = map(opt(ws(char(')'))), |o| o.is_some()); + let mut opt_opening_brace = map(opt(ws(char('{'))), |o| o.is_some()); + + let (i, lit) = opt(variant_lit)(i)?; + if let Some(lit) = lit { + return Ok((i, lit)); + } + + // match tuples and unused parentheses + let (i, target_is_tuple) = opt_opening_paren(i)?; + if target_is_tuple { + let (i, is_empty_tuple) = opt_closing_paren(i)?; + if is_empty_tuple { + return Ok((i, Target::Tuple(Vec::new(), Vec::new()))); + } + + let (i, first_target) = target(i)?; + let (i, is_unused_paren) = opt_closing_paren(i)?; + if is_unused_paren { + return Ok((i, first_target)); + } + + let mut targets = vec![first_target]; + let (i, _) = cut(tuple(( + fold_many0( + preceded(ws(char(',')), target), + || (), + |_, target| { + targets.push(target); + }, + ), + opt(ws(char(','))), + ws(cut(char(')'))), + )))(i)?; + return Ok((i, Target::Tuple(Vec::new(), targets))); + } + + // match structs + let (i, path) = opt(path)(i)?; + if let Some(path) = path { + let i_before_matching_with = i; + let (i, _) = opt(ws(tag("with")))(i)?; + + let (i, is_unnamed_struct) = opt_opening_paren(i)?; + if is_unnamed_struct { + let (i, targets) = alt(( + map(char(')'), |_| Vec::new()), + terminated( + cut(separated_list1(ws(char(',')), target)), + pair(opt(ws(char(','))), ws(cut(char(')')))), + ), + ))(i)?; + return Ok((i, Target::Tuple(path, targets))); + } + + let (i, is_named_struct) = opt_opening_brace(i)?; + if is_named_struct { + let (i, targets) = alt(( + map(char('}'), |_| Vec::new()), + terminated( + cut(separated_list1(ws(char(',')), named_target)), + pair(opt(ws(char(','))), ws(cut(char('}')))), + ), + ))(i)?; + return Ok((i, Target::Struct(path, targets))); + } + + return Ok((i_before_matching_with, Target::Path(path))); + } + + // neither literal nor struct nor path + map(identifier, Target::Name)(i) +} + +fn arguments(i: &str) -> IResult<&str, Vec>> { + delimited( + ws(char('(')), + separated_list0(char(','), ws(expr_any)), + ws(char(')')), + )(i) +} + +fn macro_arguments(i: &str) -> IResult<&str, &str> { + delimited(char('('), recognize(nested_parenthesis), char(')'))(i) +} + +fn nested_parenthesis(i: &str) -> IResult<&str, ()> { + let mut nested = 0; + let mut last = 0; + let mut in_str = false; + let mut escaped = false; + + for (i, b) in i.chars().enumerate() { 
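// Scan for the `)` that balances the `(` the caller has already consumed:
// `nested` tracks additional nesting, parentheses inside string literals are
// ignored (backslash escapes included), and `last` remembers the position of
// the balancing `)`, which is left in the remaining input for the caller's
// closing `char(')')`.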
+ if !(b == '(' || b == ')') || !in_str { + match b { + '(' => nested += 1, + ')' => { + if nested == 0 { + last = i; + break; + } + nested -= 1; + } + '"' => { + if in_str { + if !escaped { + in_str = false; + } + } else { + in_str = true; + } + } + '\\' => { + escaped = !escaped; + } + _ => (), + } + } + + if escaped && b != '\\' { + escaped = false; + } + } + + if nested == 0 { + Ok((&i[last..], ())) + } else { + Err(nom::Err::Error(error_position!( + i, + ErrorKind::SeparatedNonEmptyList + ))) + } +} + +fn parameters(i: &str) -> IResult<&str, Vec<&str>> { + delimited( + ws(char('(')), + separated_list0(char(','), ws(identifier)), + ws(char(')')), + )(i) +} + +fn expr_group(i: &str) -> IResult<&str, Expr<'_>> { + let (i, expr) = preceded(ws(char('(')), opt(expr_any))(i)?; + let expr = match expr { + Some(expr) => expr, + None => { + let (i, _) = char(')')(i)?; + return Ok((i, Expr::Tuple(vec![]))); + } + }; + + let (i, comma) = ws(opt(peek(char(','))))(i)?; + if comma.is_none() { + let (i, _) = char(')')(i)?; + return Ok((i, Expr::Group(Box::new(expr)))); + } + + let mut exprs = vec![expr]; + let (i, _) = fold_many0( + preceded(char(','), ws(expr_any)), + || (), + |_, expr| { + exprs.push(expr); + }, + )(i)?; + let (i, _) = pair(ws(opt(char(','))), char(')'))(i)?; + Ok((i, Expr::Tuple(exprs))) +} + +fn expr_single(i: &str) -> IResult<&str, Expr<'_>> { + alt(( + expr_bool_lit, + expr_num_lit, + expr_str_lit, + expr_char_lit, + expr_path, + expr_rust_macro, + expr_array_lit, + expr_var, + expr_group, + ))(i) +} + +enum Suffix<'a> { + Attr(&'a str), + Index(Expr<'a>), + Call(Vec>), + Try, +} + +fn expr_attr(i: &str) -> IResult<&str, Suffix<'_>> { + map( + preceded( + ws(pair(char('.'), not(char('.')))), + cut(alt((num_lit, identifier))), + ), + Suffix::Attr, + )(i) +} + +fn expr_index(i: &str) -> IResult<&str, Suffix<'_>> { + map( + preceded(ws(char('[')), cut(terminated(expr_any, ws(char(']'))))), + Suffix::Index, + )(i) +} + +fn expr_call(i: &str) -> IResult<&str, Suffix<'_>> { + map(arguments, Suffix::Call)(i) +} + +fn expr_try(i: &str) -> IResult<&str, Suffix<'_>> { + map(preceded(take_till(not_ws), char('?')), |_| Suffix::Try)(i) +} + +fn filter(i: &str) -> IResult<&str, (&str, Option>>)> { + let (i, (_, fname, args)) = tuple((char('|'), ws(identifier), opt(arguments)))(i)?; + Ok((i, (fname, args))) +} + +fn expr_filtered(i: &str) -> IResult<&str, Expr<'_>> { + let (i, (obj, filters)) = tuple((expr_prefix, many0(filter)))(i)?; + + let mut res = obj; + for (fname, args) in filters { + res = Expr::Filter(fname, { + let mut args = match args { + Some(inner) => inner, + None => Vec::new(), + }; + args.insert(0, res); + args + }); + } + + Ok((i, res)) +} + +fn expr_prefix(i: &str) -> IResult<&str, Expr<'_>> { + let (i, (ops, mut expr)) = pair(many0(ws(alt((tag("!"), tag("-"))))), expr_suffix)(i)?; + for op in ops.iter().rev() { + expr = Expr::Unary(op, Box::new(expr)); + } + Ok((i, expr)) +} + +fn expr_suffix(i: &str) -> IResult<&str, Expr<'_>> { + let (mut i, mut expr) = expr_single(i)?; + loop { + let (j, suffix) = opt(alt((expr_attr, expr_index, expr_call, expr_try)))(i)?; + i = j; + match suffix { + Some(Suffix::Attr(attr)) => expr = Expr::Attr(expr.into(), attr), + Some(Suffix::Index(index)) => expr = Expr::Index(expr.into(), index.into()), + Some(Suffix::Call(args)) => expr = Expr::Call(expr.into(), args), + Some(Suffix::Try) => expr = Expr::Try(expr.into()), + None => break, + } + } + Ok((i, expr)) +} + +fn expr_rust_macro(i: &str) -> IResult<&str, Expr<'_>> { + let (i, (mname, _, 
args)) = tuple((identifier, char('!'), macro_arguments))(i)?; + Ok((i, Expr::RustMacro(mname, args))) +} + +macro_rules! expr_prec_layer { + ( $name:ident, $inner:ident, $op:expr ) => { + fn $name(i: &str) -> IResult<&str, Expr<'_>> { + let (i, left) = $inner(i)?; + let (i, right) = many0(pair( + ws(tag($op)), + $inner, + ))(i)?; + Ok(( + i, + right.into_iter().fold(left, |left, (op, right)| { + Expr::BinOp(op, Box::new(left), Box::new(right)) + }), + )) + } + }; + ( $name:ident, $inner:ident, $( $op:expr ),+ ) => { + fn $name(i: &str) -> IResult<&str, Expr<'_>> { + let (i, left) = $inner(i)?; + let (i, right) = many0(pair( + ws(alt(($( tag($op) ),+,))), + $inner, + ))(i)?; + Ok(( + i, + right.into_iter().fold(left, |left, (op, right)| { + Expr::BinOp(op, Box::new(left), Box::new(right)) + }), + )) + } + } +} + +expr_prec_layer!(expr_muldivmod, expr_filtered, "*", "/", "%"); +expr_prec_layer!(expr_addsub, expr_muldivmod, "+", "-"); +expr_prec_layer!(expr_shifts, expr_addsub, ">>", "<<"); +expr_prec_layer!(expr_band, expr_shifts, "&"); +expr_prec_layer!(expr_bxor, expr_band, "^"); +expr_prec_layer!(expr_bor, expr_bxor, "|"); +expr_prec_layer!(expr_compare, expr_bor, "==", "!=", ">=", ">", "<=", "<"); +expr_prec_layer!(expr_and, expr_compare, "&&"); +expr_prec_layer!(expr_or, expr_and, "||"); + +fn expr_handle_ws(i: &str) -> IResult<&str, Whitespace> { + alt((char('-'), char('+'), char('~')))(i).map(|(s, r)| (s, Whitespace::from(r))) +} + +fn expr_any(i: &str) -> IResult<&str, Expr<'_>> { + let range_right = |i| pair(ws(alt((tag("..="), tag("..")))), opt(expr_or))(i); + alt(( + map(range_right, |(op, right)| { + Expr::Range(op, None, right.map(Box::new)) + }), + map( + pair(expr_or, opt(range_right)), + |(left, right)| match right { + Some((op, right)) => Expr::Range(op, Some(Box::new(left)), right.map(Box::new)), + None => left, + }, + ), + ))(i) +} + +fn expr_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut p = tuple(( + |i| tag_expr_start(i, s), + cut(tuple(( + opt(expr_handle_ws), + ws(expr_any), + opt(expr_handle_ws), + |i| tag_expr_end(i, s), + ))), + )); + let (i, (_, (pws, expr, nws, _))) = p(i)?; + Ok((i, Node::Expr(Ws(pws, nws), expr))) +} + +fn block_call(i: &str) -> IResult<&str, Node<'_>> { + let mut p = tuple(( + opt(expr_handle_ws), + ws(tag("call")), + cut(tuple(( + opt(tuple((ws(identifier), ws(tag("::"))))), + ws(identifier), + ws(arguments), + opt(expr_handle_ws), + ))), + )); + let (i, (pws, _, (scope, name, args, nws))) = p(i)?; + let scope = scope.map(|(scope, _)| scope); + Ok((i, Node::Call(Ws(pws, nws), scope, name, args))) +} + +fn cond_if(i: &str) -> IResult<&str, CondTest<'_>> { + let mut p = preceded( + ws(tag("if")), + cut(tuple(( + opt(delimited( + ws(alt((tag("let"), tag("set")))), + ws(target), + ws(char('=')), + )), + ws(expr_any), + ))), + ); + let (i, (target, expr)) = p(i)?; + Ok((i, CondTest { target, expr })) +} + +fn cond_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Cond<'a>> { + let mut p = tuple(( + |i| tag_block_start(i, s), + opt(expr_handle_ws), + ws(tag("else")), + cut(tuple(( + opt(cond_if), + opt(expr_handle_ws), + |i| tag_block_end(i, s), + cut(|i| parse_template(i, s)), + ))), + )); + let (i, (_, pws, _, (cond, nws, _, block))) = p(i)?; + Ok((i, (Ws(pws, nws), cond, block))) +} + +fn block_if<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut p = tuple(( + opt(expr_handle_ws), + cond_if, + cut(tuple(( + opt(expr_handle_ws), + |i| tag_block_end(i, s), + cut(tuple(( + |i| 
parse_template(i, s), + many0(|i| cond_block(i, s)), + cut(tuple(( + |i| tag_block_start(i, s), + opt(expr_handle_ws), + ws(tag("endif")), + opt(expr_handle_ws), + ))), + ))), + ))), + )); + let (i, (pws1, cond, (nws1, _, (block, elifs, (_, pws2, _, nws2))))) = p(i)?; + + let mut res = vec![(Ws(pws1, nws1), Some(cond), block)]; + res.extend(elifs); + Ok((i, Node::Cond(res, Ws(pws2, nws2)))) +} + +fn match_else_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> { + let mut p = tuple(( + |i| tag_block_start(i, s), + opt(expr_handle_ws), + ws(tag("else")), + cut(tuple(( + opt(expr_handle_ws), + |i| tag_block_end(i, s), + cut(|i| parse_template(i, s)), + ))), + )); + let (i, (_, pws, _, (nws, _, block))) = p(i)?; + Ok((i, (Ws(pws, nws), Target::Name("_"), block))) +} + +fn when_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> { + let mut p = tuple(( + |i| tag_block_start(i, s), + opt(expr_handle_ws), + ws(tag("when")), + cut(tuple(( + ws(target), + opt(expr_handle_ws), + |i| tag_block_end(i, s), + cut(|i| parse_template(i, s)), + ))), + )); + let (i, (_, pws, _, (target, nws, _, block))) = p(i)?; + Ok((i, (Ws(pws, nws), target, block))) +} + +fn block_match<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut p = tuple(( + opt(expr_handle_ws), + ws(tag("match")), + cut(tuple(( + ws(expr_any), + opt(expr_handle_ws), + |i| tag_block_end(i, s), + cut(tuple(( + ws(many0(ws(value((), |i| block_comment(i, s))))), + many1(|i| when_block(i, s)), + cut(tuple(( + opt(|i| match_else_block(i, s)), + cut(tuple(( + ws(|i| tag_block_start(i, s)), + opt(expr_handle_ws), + ws(tag("endmatch")), + opt(expr_handle_ws), + ))), + ))), + ))), + ))), + )); + let (i, (pws1, _, (expr, nws1, _, (_, arms, (else_arm, (_, pws2, _, nws2)))))) = p(i)?; + + let mut arms = arms; + if let Some(arm) = else_arm { + arms.push(arm); + } + + Ok((i, Node::Match(Ws(pws1, nws1), expr, arms, Ws(pws2, nws2)))) +} + +fn block_let(i: &str) -> IResult<&str, Node<'_>> { + let mut p = tuple(( + opt(expr_handle_ws), + ws(alt((tag("let"), tag("set")))), + cut(tuple(( + ws(target), + opt(tuple((ws(char('=')), ws(expr_any)))), + opt(expr_handle_ws), + ))), + )); + let (i, (pws, _, (var, val, nws))) = p(i)?; + + Ok(( + i, + if let Some((_, val)) = val { + Node::Let(Ws(pws, nws), var, val) + } else { + Node::LetDecl(Ws(pws, nws), var) + }, + )) +} + +fn parse_loop_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec>> { + s.loop_depth.set(s.loop_depth.get() + 1); + let result = parse_template(i, s); + s.loop_depth.set(s.loop_depth.get() - 1); + result +} + +fn block_for<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let if_cond = preceded(ws(tag("if")), cut(ws(expr_any))); + let else_block = |i| { + let mut p = preceded( + ws(tag("else")), + cut(tuple(( + opt(expr_handle_ws), + delimited( + |i| tag_block_end(i, s), + |i| parse_template(i, s), + |i| tag_block_start(i, s), + ), + opt(expr_handle_ws), + ))), + ); + let (i, (pws, nodes, nws)) = p(i)?; + Ok((i, (pws, nodes, nws))) + }; + let mut p = tuple(( + opt(expr_handle_ws), + ws(tag("for")), + cut(tuple(( + ws(target), + ws(tag("in")), + cut(tuple(( + ws(expr_any), + opt(if_cond), + opt(expr_handle_ws), + |i| tag_block_end(i, s), + cut(tuple(( + |i| parse_loop_content(i, s), + cut(tuple(( + |i| tag_block_start(i, s), + opt(expr_handle_ws), + opt(else_block), + ws(tag("endfor")), + opt(expr_handle_ws), + ))), + ))), + ))), + ))), + )); + let (i, (pws1, _, (var, _, (iter, cond, nws1, _, (body, (_, pws2, else_block, _, 
nws2)))))) = + p(i)?; + let (nws3, else_block, pws3) = else_block.unwrap_or_default(); + Ok(( + i, + Node::Loop(Loop { + ws1: Ws(pws1, nws1), + var, + iter, + cond, + body, + ws2: Ws(pws2, nws3), + else_block, + ws3: Ws(pws3, nws2), + }), + )) +} + +fn block_extends(i: &str) -> IResult<&str, Node<'_>> { + let (i, (_, name)) = tuple((ws(tag("extends")), ws(expr_str_lit)))(i)?; + Ok((i, Node::Extends(name))) +} + +fn block_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut start = tuple(( + opt(expr_handle_ws), + ws(tag("block")), + cut(tuple((ws(identifier), opt(expr_handle_ws), |i| { + tag_block_end(i, s) + }))), + )); + let (i, (pws1, _, (name, nws1, _))) = start(i)?; + + let mut end = cut(tuple(( + |i| parse_template(i, s), + cut(tuple(( + |i| tag_block_start(i, s), + opt(expr_handle_ws), + ws(tag("endblock")), + cut(tuple((opt(ws(tag(name))), opt(expr_handle_ws)))), + ))), + ))); + let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?; + + Ok(( + i, + Node::BlockDef(Ws(pws1, nws1), name, contents, Ws(pws2, nws2)), + )) +} + +fn block_include(i: &str) -> IResult<&str, Node<'_>> { + let mut p = tuple(( + opt(expr_handle_ws), + ws(tag("include")), + cut(pair(ws(str_lit), opt(expr_handle_ws))), + )); + let (i, (pws, _, (name, nws))) = p(i)?; + Ok((i, Node::Include(Ws(pws, nws), name))) +} + +fn block_import(i: &str) -> IResult<&str, Node<'_>> { + let mut p = tuple(( + opt(expr_handle_ws), + ws(tag("import")), + cut(tuple(( + ws(str_lit), + ws(tag("as")), + cut(pair(ws(identifier), opt(expr_handle_ws))), + ))), + )); + let (i, (pws, _, (name, _, (scope, nws)))) = p(i)?; + Ok((i, Node::Import(Ws(pws, nws), name, scope))) +} + +fn block_macro<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut start = tuple(( + opt(expr_handle_ws), + ws(tag("macro")), + cut(tuple(( + ws(identifier), + ws(parameters), + opt(expr_handle_ws), + |i| tag_block_end(i, s), + ))), + )); + let (i, (pws1, _, (name, params, nws1, _))) = start(i)?; + + let mut end = cut(tuple(( + |i| parse_template(i, s), + cut(tuple(( + |i| tag_block_start(i, s), + opt(expr_handle_ws), + ws(tag("endmacro")), + cut(tuple((opt(ws(tag(name))), opt(expr_handle_ws)))), + ))), + ))); + let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?; + + assert_ne!(name, "super", "invalid macro name 'super'"); + + Ok(( + i, + Node::Macro( + name, + Macro { + ws1: Ws(pws1, nws1), + args: params, + nodes: contents, + ws2: Ws(pws2, nws2), + }, + ), + )) +} + +fn block_raw<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let endraw = tuple(( + |i| tag_block_start(i, s), + opt(expr_handle_ws), + ws(tag("endraw")), + opt(expr_handle_ws), + peek(|i| tag_block_end(i, s)), + )); + + let mut p = tuple(( + opt(expr_handle_ws), + ws(tag("raw")), + cut(tuple(( + opt(expr_handle_ws), + |i| tag_block_end(i, s), + consumed(skip_till(endraw)), + ))), + )); + + let (_, (pws1, _, (nws1, _, (contents, (i, (_, pws2, _, nws2, _)))))) = p(i)?; + let (lws, val, rws) = match split_ws_parts(contents) { + Node::Lit(lws, val, rws) => (lws, val, rws), + _ => unreachable!(), + }; + let ws1 = Ws(pws1, nws1); + let ws2 = Ws(pws2, nws2); + Ok((i, Node::Raw(ws1, lws, val, rws, ws2))) +} + +fn break_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut p = tuple((opt(expr_handle_ws), ws(tag("break")), opt(expr_handle_ws))); + let (j, (pws, _, nws)) = p(i)?; + if s.loop_depth.get() == 0 { + return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))); + } + Ok((j, Node::Break(Ws(pws, 
nws)))) +} + +fn continue_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut p = tuple(( + opt(expr_handle_ws), + ws(tag("continue")), + opt(expr_handle_ws), + )); + let (j, (pws, _, nws)) = p(i)?; + if s.loop_depth.get() == 0 { + return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))); + } + Ok((j, Node::Continue(Ws(pws, nws)))) +} + +fn block_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut p = tuple(( + |i| tag_block_start(i, s), + alt(( + block_call, + block_let, + |i| block_if(i, s), + |i| block_for(i, s), + |i| block_match(i, s), + block_extends, + block_include, + block_import, + |i| block_block(i, s), + |i| block_macro(i, s), + |i| block_raw(i, s), + |i| break_statement(i, s), + |i| continue_statement(i, s), + )), + cut(|i| tag_block_end(i, s)), + )); + let (i, (_, contents, _)) = p(i)?; + Ok((i, contents)) +} + +fn block_comment_body<'a>(mut i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { + let mut level = 0; + loop { + let (end, tail) = take_until(s.syntax.comment_end)(i)?; + match take_until::<_, _, Error<_>>(s.syntax.comment_start)(i) { + Ok((start, _)) if start.as_ptr() < end.as_ptr() => { + level += 1; + i = &start[2..]; + } + _ if level > 0 => { + level -= 1; + i = &end[2..]; + } + _ => return Ok((end, tail)), + } + } +} + +fn block_comment<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { + let mut p = tuple(( + |i| tag_comment_start(i, s), + cut(tuple(( + opt(expr_handle_ws), + |i| block_comment_body(i, s), + |i| tag_comment_end(i, s), + ))), + )); + let (i, (_, (pws, tail, _))) = p(i)?; + let nws = if tail.ends_with('-') { + Some(Whitespace::Suppress) + } else if tail.ends_with('+') { + Some(Whitespace::Preserve) + } else if tail.ends_with('~') { + Some(Whitespace::Minimize) + } else { + None + }; + Ok((i, Node::Comment(Ws(pws, nws)))) +} + +fn parse_template<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec>> { + many0(alt(( + complete(|i| take_content(i, s)), + complete(|i| block_comment(i, s)), + complete(|i| expr_node(i, s)), + complete(|i| block_node(i, s)), + )))(i) +} + +fn tag_block_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { + tag(s.syntax.block_start)(i) +} +fn tag_block_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { + tag(s.syntax.block_end)(i) +} +fn tag_comment_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { + tag(s.syntax.comment_start)(i) +} +fn tag_comment_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { + tag(s.syntax.comment_end)(i) +} +fn tag_expr_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { + tag(s.syntax.expr_start)(i) +} +fn tag_expr_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { + tag(s.syntax.expr_end)(i) +} + +pub(crate) fn parse<'a>( + src: &'a str, + syntax: &'a Syntax<'a>, +) -> Result>, CompileError> { + let state = State { + syntax, + loop_depth: Cell::new(0), + }; + match parse_template(src, &state) { + Ok((left, res)) => { + if !left.is_empty() { + Err(format!("unable to parse template:\n\n{:?}", left).into()) + } else { + Ok(res) + } + } + + Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => { + let nom::error::Error { input, .. 
} = err; + let offset = src.len() - input.len(); + let (source_before, source_after) = src.split_at(offset); + + let source_after = match source_after.char_indices().enumerate().take(41).last() { + Some((40, (i, _))) => format!("{:?}...", &source_after[..i]), + _ => format!("{:?}", source_after), + }; + + let (row, last_line) = source_before.lines().enumerate().last().unwrap(); + let column = last_line.chars().count(); + + let msg = format!( + "problems parsing template source at row {}, column {} near:\n{}", + row + 1, + column, + source_after, + ); + Err(msg.into()) + } + + Err(nom::Err::Incomplete(_)) => Err("parsing incomplete".into()), + } +} + +#[cfg(test)] +mod tests { + use super::{Expr, Node, Whitespace, Ws}; + use crate::config::Syntax; + + fn check_ws_split(s: &str, res: &(&str, &str, &str)) { + match super::split_ws_parts(s) { + Node::Lit(lws, s, rws) => { + assert_eq!(lws, res.0); + assert_eq!(s, res.1); + assert_eq!(rws, res.2); + } + _ => { + panic!("fail"); + } + } + } + + #[test] + fn test_ws_splitter() { + check_ws_split("", &("", "", "")); + check_ws_split("a", &("", "a", "")); + check_ws_split("\ta", &("\t", "a", "")); + check_ws_split("b\n", &("", "b", "\n")); + check_ws_split(" \t\r\n", &(" \t\r\n", "", "")); + } + + #[test] + #[should_panic] + fn test_invalid_block() { + super::parse("{% extend \"blah\" %}", &Syntax::default()).unwrap(); + } + + #[test] + fn test_parse_filter() { + use Expr::*; + let syntax = Syntax::default(); + assert_eq!( + super::parse("{{ strvar|e }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Filter("e", vec![Var("strvar")]),)], + ); + assert_eq!( + super::parse("{{ 2|abs }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Filter("abs", vec![NumLit("2")]),)], + ); + assert_eq!( + super::parse("{{ -2|abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Filter("abs", vec![Unary("-", NumLit("2").into())]), + )], + ); + assert_eq!( + super::parse("{{ (1 - 2)|abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Filter( + "abs", + vec![Group( + BinOp("-", NumLit("1").into(), NumLit("2").into()).into() + )] + ), + )], + ); + } + + #[test] + fn test_parse_numbers() { + let syntax = Syntax::default(); + assert_eq!( + super::parse("{{ 2 }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::NumLit("2"),)], + ); + assert_eq!( + super::parse("{{ 2.5 }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::NumLit("2.5"),)], + ); + } + + #[test] + fn test_parse_var() { + let s = Syntax::default(); + + assert_eq!( + super::parse("{{ foo }}", &s).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::Var("foo"))], + ); + assert_eq!( + super::parse("{{ foo_bar }}", &s).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::Var("foo_bar"))], + ); + + assert_eq!( + super::parse("{{ none }}", &s).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::Var("none"))], + ); + } + + #[test] + fn test_parse_const() { + let s = Syntax::default(); + + assert_eq!( + super::parse("{{ FOO }}", &s).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO"]))], + ); + assert_eq!( + super::parse("{{ FOO_BAR }}", &s).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO_BAR"]))], + ); + + assert_eq!( + super::parse("{{ NONE }}", &s).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::Path(vec!["NONE"]))], + ); + } + + #[test] + fn test_parse_path() { + let s = Syntax::default(); + + assert_eq!( + super::parse("{{ None }}", &s).unwrap(), + vec![Node::Expr(Ws(None, None), Expr::Path(vec!["None"]))], + ); + assert_eq!( 
+ super::parse("{{ Some(123) }}", &s).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Call( + Box::new(Expr::Path(vec!["Some"])), + vec![Expr::NumLit("123")] + ), + )], + ); + + assert_eq!( + super::parse("{{ Ok(123) }}", &s).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Call(Box::new(Expr::Path(vec!["Ok"])), vec![Expr::NumLit("123")]), + )], + ); + assert_eq!( + super::parse("{{ Err(123) }}", &s).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Call(Box::new(Expr::Path(vec!["Err"])), vec![Expr::NumLit("123")]), + )], + ); + } + + #[test] + fn test_parse_var_call() { + assert_eq!( + super::parse("{{ function(\"123\", 3) }}", &Syntax::default()).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Call( + Box::new(Expr::Var("function")), + vec![Expr::StrLit("123"), Expr::NumLit("3")] + ), + )], + ); + } + + #[test] + fn test_parse_path_call() { + let s = Syntax::default(); + + assert_eq!( + super::parse("{{ Option::None }}", &s).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Path(vec!["Option", "None"]) + )], + ); + assert_eq!( + super::parse("{{ Option::Some(123) }}", &s).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Call( + Box::new(Expr::Path(vec!["Option", "Some"])), + vec![Expr::NumLit("123")], + ), + )], + ); + + assert_eq!( + super::parse("{{ self::function(\"123\", 3) }}", &s).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Call( + Box::new(Expr::Path(vec!["self", "function"])), + vec![Expr::StrLit("123"), Expr::NumLit("3")], + ), + )], + ); + } + + #[test] + fn test_parse_root_path() { + let syntax = Syntax::default(); + assert_eq!( + super::parse("{{ std::string::String::new() }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Call( + Box::new(Expr::Path(vec!["std", "string", "String", "new"])), + vec![] + ), + )], + ); + assert_eq!( + super::parse("{{ ::std::string::String::new() }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Call( + Box::new(Expr::Path(vec!["", "std", "string", "String", "new"])), + vec![] + ), + )], + ); + } + + #[test] + fn change_delimiters_parse_filter() { + let syntax = Syntax { + expr_start: "{=", + expr_end: "=}", + ..Syntax::default() + }; + + super::parse("{= strvar|e =}", &syntax).unwrap(); + } + + #[test] + fn test_precedence() { + use Expr::*; + let syntax = Syntax::default(); + assert_eq!( + super::parse("{{ a + b == c }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "==", + BinOp("+", Var("a").into(), Var("b").into()).into(), + Var("c").into(), + ) + )], + ); + assert_eq!( + super::parse("{{ a + b * c - d / e }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "-", + BinOp( + "+", + Var("a").into(), + BinOp("*", Var("b").into(), Var("c").into()).into(), + ) + .into(), + BinOp("/", Var("d").into(), Var("e").into()).into(), + ) + )], + ); + assert_eq!( + super::parse("{{ a * (b + c) / -d }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "/", + BinOp( + "*", + Var("a").into(), + Group(BinOp("+", Var("b").into(), Var("c").into()).into()).into() + ) + .into(), + Unary("-", Var("d").into()).into() + ) + )], + ); + assert_eq!( + super::parse("{{ a || b && c || d && e }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "||", + BinOp( + "||", + Var("a").into(), + BinOp("&&", Var("b").into(), Var("c").into()).into(), + ) + .into(), + BinOp("&&", Var("d").into(), Var("e").into()).into(), + ) + )], + ); + } + + #[test] + fn test_associativity() { + use Expr::*; + let syntax = 
Syntax::default(); + assert_eq!( + super::parse("{{ a + b + c }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "+", + BinOp("+", Var("a").into(), Var("b").into()).into(), + Var("c").into() + ) + )], + ); + assert_eq!( + super::parse("{{ a * b * c }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "*", + BinOp("*", Var("a").into(), Var("b").into()).into(), + Var("c").into() + ) + )], + ); + assert_eq!( + super::parse("{{ a && b && c }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "&&", + BinOp("&&", Var("a").into(), Var("b").into()).into(), + Var("c").into() + ) + )], + ); + assert_eq!( + super::parse("{{ a + b - c + d }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "+", + BinOp( + "-", + BinOp("+", Var("a").into(), Var("b").into()).into(), + Var("c").into() + ) + .into(), + Var("d").into() + ) + )], + ); + assert_eq!( + super::parse("{{ a == b != c > d > e == f }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "==", + BinOp( + ">", + BinOp( + ">", + BinOp( + "!=", + BinOp("==", Var("a").into(), Var("b").into()).into(), + Var("c").into() + ) + .into(), + Var("d").into() + ) + .into(), + Var("e").into() + ) + .into(), + Var("f").into() + ) + )], + ); + } + + #[test] + fn test_odd_calls() { + use Expr::*; + let syntax = Syntax::default(); + assert_eq!( + super::parse("{{ a[b](c) }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Call( + Box::new(Index(Box::new(Var("a")), Box::new(Var("b")))), + vec![Var("c")], + ), + )], + ); + assert_eq!( + super::parse("{{ (a + b)(c) }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Call( + Box::new(Group(Box::new(BinOp( + "+", + Box::new(Var("a")), + Box::new(Var("b")) + )))), + vec![Var("c")], + ), + )], + ); + assert_eq!( + super::parse("{{ a + b(c) }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "+", + Box::new(Var("a")), + Box::new(Call(Box::new(Var("b")), vec![Var("c")])), + ), + )], + ); + assert_eq!( + super::parse("{{ (-a)(b) }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Call( + Box::new(Group(Box::new(Unary("-", Box::new(Var("a")))))), + vec![Var("b")], + ), + )], + ); + assert_eq!( + super::parse("{{ -a(b) }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Unary("-", Box::new(Call(Box::new(Var("a")), vec![Var("b")])),), + )], + ); + } + + #[test] + fn test_parse_comments() { + let s = &Syntax::default(); + + assert_eq!( + super::parse("{##}", s).unwrap(), + vec![Node::Comment(Ws(None, None))], + ); + assert_eq!( + super::parse("{#- #}", s).unwrap(), + vec![Node::Comment(Ws(Some(Whitespace::Suppress), None))], + ); + assert_eq!( + super::parse("{# -#}", s).unwrap(), + vec![Node::Comment(Ws(None, Some(Whitespace::Suppress)))], + ); + assert_eq!( + super::parse("{#--#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Suppress), + Some(Whitespace::Suppress) + ))], + ); + assert_eq!( + super::parse("{#- foo\n bar -#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Suppress), + Some(Whitespace::Suppress) + ))], + ); + assert_eq!( + super::parse("{#- foo\n {#- bar\n -#} baz -#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Suppress), + Some(Whitespace::Suppress) + ))], + ); + assert_eq!( + super::parse("{#+ #}", s).unwrap(), + vec![Node::Comment(Ws(Some(Whitespace::Preserve), None))], + ); + assert_eq!( + super::parse("{# +#}", s).unwrap(), + vec![Node::Comment(Ws(None, Some(Whitespace::Preserve)))], + ); + assert_eq!( + 
super::parse("{#++#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Preserve), + Some(Whitespace::Preserve) + ))], + ); + assert_eq!( + super::parse("{#+ foo\n bar +#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Preserve), + Some(Whitespace::Preserve) + ))], + ); + assert_eq!( + super::parse("{#+ foo\n {#+ bar\n +#} baz -+#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Preserve), + Some(Whitespace::Preserve) + ))], + ); + assert_eq!( + super::parse("{#~ #}", s).unwrap(), + vec![Node::Comment(Ws(Some(Whitespace::Minimize), None))], + ); + assert_eq!( + super::parse("{# ~#}", s).unwrap(), + vec![Node::Comment(Ws(None, Some(Whitespace::Minimize)))], + ); + assert_eq!( + super::parse("{#~~#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Minimize), + Some(Whitespace::Minimize) + ))], + ); + assert_eq!( + super::parse("{#~ foo\n bar ~#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Minimize), + Some(Whitespace::Minimize) + ))], + ); + assert_eq!( + super::parse("{#~ foo\n {#~ bar\n ~#} baz -~#}", s).unwrap(), + vec![Node::Comment(Ws( + Some(Whitespace::Minimize), + Some(Whitespace::Minimize) + ))], + ); + + assert_eq!( + super::parse("{# foo {# bar #} {# {# baz #} qux #} #}", s).unwrap(), + vec![Node::Comment(Ws(None, None))], + ); + } + + #[test] + fn test_parse_tuple() { + use super::Expr::*; + let syntax = Syntax::default(); + assert_eq!( + super::parse("{{ () }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Tuple(vec![]),)], + ); + assert_eq!( + super::parse("{{ (1) }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Group(Box::new(NumLit("1"))),)], + ); + assert_eq!( + super::parse("{{ (1,) }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)], + ); + assert_eq!( + super::parse("{{ (1, ) }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)], + ); + assert_eq!( + super::parse("{{ (1 ,) }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)], + ); + assert_eq!( + super::parse("{{ (1 , ) }}", &syntax).unwrap(), + vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)], + ); + assert_eq!( + super::parse("{{ (1, 2) }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Tuple(vec![NumLit("1"), NumLit("2")]), + )], + ); + assert_eq!( + super::parse("{{ (1, 2,) }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Tuple(vec![NumLit("1"), NumLit("2")]), + )], + ); + assert_eq!( + super::parse("{{ (1, 2, 3) }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Tuple(vec![NumLit("1"), NumLit("2"), NumLit("3")]), + )], + ); + assert_eq!( + super::parse("{{ ()|abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Filter("abs", vec![Tuple(vec![])]), + )], + ); + assert_eq!( + super::parse("{{ () | abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp("|", Box::new(Tuple(vec![])), Box::new(Var("abs"))), + )], + ); + assert_eq!( + super::parse("{{ (1)|abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Filter("abs", vec![Group(Box::new(NumLit("1")))]), + )], + ); + assert_eq!( + super::parse("{{ (1) | abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "|", + Box::new(Group(Box::new(NumLit("1")))), + Box::new(Var("abs")) + ), + )], + ); + assert_eq!( + super::parse("{{ (1,)|abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Filter("abs", vec![Tuple(vec![NumLit("1")])]), + )], + ); + assert_eq!( + super::parse("{{ (1,) 
| abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "|", + Box::new(Tuple(vec![NumLit("1")])), + Box::new(Var("abs")) + ), + )], + ); + assert_eq!( + super::parse("{{ (1, 2)|abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + Filter("abs", vec![Tuple(vec![NumLit("1"), NumLit("2")])]), + )], + ); + assert_eq!( + super::parse("{{ (1, 2) | abs }}", &syntax).unwrap(), + vec![Node::Expr( + Ws(None, None), + BinOp( + "|", + Box::new(Tuple(vec![NumLit("1"), NumLit("2")])), + Box::new(Var("abs")) + ), + )], + ); + } +} diff --git a/askama_derive/templates/a.html b/askama_derive/templates/a.html new file mode 100644 index 0000000..257cc56 --- /dev/null +++ b/askama_derive/templates/a.html @@ -0,0 +1 @@ +foo diff --git a/askama_derive/templates/b.html b/askama_derive/templates/b.html new file mode 100644 index 0000000..5716ca5 --- /dev/null +++ b/askama_derive/templates/b.html @@ -0,0 +1 @@ +bar diff --git a/askama_derive/templates/sub/b.html b/askama_derive/templates/sub/b.html new file mode 100644 index 0000000..5716ca5 --- /dev/null +++ b/askama_derive/templates/sub/b.html @@ -0,0 +1 @@ +bar diff --git a/askama_derive/templates/sub/c.html b/askama_derive/templates/sub/c.html new file mode 100644 index 0000000..7601807 --- /dev/null +++ b/askama_derive/templates/sub/c.html @@ -0,0 +1 @@ +baz diff --git a/askama_derive/templates/sub/sub1/d.html b/askama_derive/templates/sub/sub1/d.html new file mode 100644 index 0000000..fa11a6a --- /dev/null +++ b/askama_derive/templates/sub/sub1/d.html @@ -0,0 +1 @@ +echo diff --git a/askama_shared/src/config.rs b/askama_shared/src/config.rs deleted file mode 100644 index 01f81a2..0000000 --- a/askama_shared/src/config.rs +++ /dev/null @@ -1,536 +0,0 @@ -use std::collections::{BTreeMap, HashSet}; -use std::convert::TryFrom; -use std::path::{Path, PathBuf}; -use std::{env, fs}; - -#[cfg(feature = "serde")] -use serde::Deserialize; - -use crate::CompileError; - -#[derive(Debug)] -pub(crate) struct Config<'a> { - pub(crate) dirs: Vec, - pub(crate) syntaxes: BTreeMap>, - pub(crate) default_syntax: &'a str, - pub(crate) escapers: Vec<(HashSet, String)>, - pub(crate) whitespace: WhitespaceHandling, -} - -impl Config<'_> { - pub(crate) fn new(s: &str) -> std::result::Result, CompileError> { - let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); - let default_dirs = vec![root.join("templates")]; - - let mut syntaxes = BTreeMap::new(); - syntaxes.insert(DEFAULT_SYNTAX_NAME.to_string(), Syntax::default()); - - let raw = if s.is_empty() { - RawConfig::default() - } else { - RawConfig::from_toml_str(s)? - }; - - let (dirs, default_syntax, whitespace) = match raw.general { - Some(General { - dirs, - default_syntax, - whitespace, - }) => ( - dirs.map_or(default_dirs, |v| { - v.into_iter().map(|dir| root.join(dir)).collect() - }), - default_syntax.unwrap_or(DEFAULT_SYNTAX_NAME), - whitespace, - ), - None => ( - default_dirs, - DEFAULT_SYNTAX_NAME, - WhitespaceHandling::default(), - ), - }; - - if let Some(raw_syntaxes) = raw.syntax { - for raw_s in raw_syntaxes { - let name = raw_s.name; - - if syntaxes - .insert(name.to_string(), Syntax::try_from(raw_s)?) 
- .is_some() - { - return Err(format!("syntax \"{}\" is already defined", name).into()); - } - } - } - - if !syntaxes.contains_key(default_syntax) { - return Err(format!("default syntax \"{}\" not found", default_syntax).into()); - } - - let mut escapers = Vec::new(); - if let Some(configured) = raw.escaper { - for escaper in configured { - escapers.push(( - escaper - .extensions - .iter() - .map(|ext| (*ext).to_string()) - .collect(), - escaper.path.to_string(), - )); - } - } - for (extensions, path) in DEFAULT_ESCAPERS { - escapers.push((str_set(extensions), (*path).to_string())); - } - - Ok(Config { - dirs, - syntaxes, - default_syntax, - escapers, - whitespace, - }) - } - - pub(crate) fn find_template( - &self, - path: &str, - start_at: Option<&Path>, - ) -> std::result::Result { - if let Some(root) = start_at { - let relative = root.with_file_name(path); - if relative.exists() { - return Ok(relative); - } - } - - for dir in &self.dirs { - let rooted = dir.join(path); - if rooted.exists() { - return Ok(rooted); - } - } - - Err(format!( - "template {:?} not found in directories {:?}", - path, self.dirs - ) - .into()) - } -} - -#[derive(Debug)] -pub(crate) struct Syntax<'a> { - pub(crate) block_start: &'a str, - pub(crate) block_end: &'a str, - pub(crate) expr_start: &'a str, - pub(crate) expr_end: &'a str, - pub(crate) comment_start: &'a str, - pub(crate) comment_end: &'a str, -} - -impl Default for Syntax<'_> { - fn default() -> Self { - Self { - block_start: "{%", - block_end: "%}", - expr_start: "{{", - expr_end: "}}", - comment_start: "{#", - comment_end: "#}", - } - } -} - -impl<'a> TryFrom> for Syntax<'a> { - type Error = CompileError; - - fn try_from(raw: RawSyntax<'a>) -> std::result::Result { - let default = Self::default(); - let syntax = Self { - block_start: raw.block_start.unwrap_or(default.block_start), - block_end: raw.block_end.unwrap_or(default.block_end), - expr_start: raw.expr_start.unwrap_or(default.expr_start), - expr_end: raw.expr_end.unwrap_or(default.expr_end), - comment_start: raw.comment_start.unwrap_or(default.comment_start), - comment_end: raw.comment_end.unwrap_or(default.comment_end), - }; - - if syntax.block_start.len() != 2 - || syntax.block_end.len() != 2 - || syntax.expr_start.len() != 2 - || syntax.expr_end.len() != 2 - || syntax.comment_start.len() != 2 - || syntax.comment_end.len() != 2 - { - return Err("length of delimiters must be two".into()); - } - - let bs = syntax.block_start.as_bytes()[0]; - let be = syntax.block_start.as_bytes()[1]; - let cs = syntax.comment_start.as_bytes()[0]; - let ce = syntax.comment_start.as_bytes()[1]; - let es = syntax.expr_start.as_bytes()[0]; - let ee = syntax.expr_start.as_bytes()[1]; - if !((bs == cs && bs == es) || (be == ce && be == ee)) { - return Err(format!("bad delimiters block_start: {}, comment_start: {}, expr_start: {}, needs one of the two characters in common", syntax.block_start, syntax.comment_start, syntax.expr_start).into()); - } - - Ok(syntax) - } -} - -#[cfg_attr(feature = "serde", derive(Deserialize))] -#[derive(Default)] -struct RawConfig<'d> { - #[cfg_attr(feature = "serde", serde(borrow))] - general: Option>, - syntax: Option>>, - escaper: Option>>, -} - -impl RawConfig<'_> { - #[cfg(feature = "config")] - fn from_toml_str(s: &str) -> std::result::Result, CompileError> { - toml::from_str(s).map_err(|e| format!("invalid TOML in {}: {}", CONFIG_FILE_NAME, e).into()) - } - - #[cfg(not(feature = "config"))] - fn from_toml_str(_: &str) -> std::result::Result, CompileError> { - Err("TOML support not 
available".into()) - } -} - -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "serde", derive(Deserialize))] -#[cfg_attr(feature = "serde", serde(field_identifier, rename_all = "lowercase"))] -pub(crate) enum WhitespaceHandling { - /// The default behaviour. It will leave the whitespace characters "as is". - Preserve, - /// It'll remove all the whitespace characters before and after the jinja block. - Suppress, - /// It'll remove all the whitespace characters except one before and after the jinja blocks. - /// If there is a newline character, the preserved character in the trimmed characters, it will - /// the one preserved. - Minimize, -} - -impl Default for WhitespaceHandling { - fn default() -> Self { - WhitespaceHandling::Preserve - } -} - -#[cfg_attr(feature = "serde", derive(Deserialize))] -struct General<'a> { - #[cfg_attr(feature = "serde", serde(borrow))] - dirs: Option>, - default_syntax: Option<&'a str>, - #[cfg_attr(feature = "serde", serde(default))] - whitespace: WhitespaceHandling, -} - -#[cfg_attr(feature = "serde", derive(Deserialize))] -struct RawSyntax<'a> { - name: &'a str, - block_start: Option<&'a str>, - block_end: Option<&'a str>, - expr_start: Option<&'a str>, - expr_end: Option<&'a str>, - comment_start: Option<&'a str>, - comment_end: Option<&'a str>, -} - -#[cfg_attr(feature = "serde", derive(Deserialize))] -struct RawEscaper<'a> { - path: &'a str, - extensions: Vec<&'a str>, -} - -pub(crate) fn read_config_file( - config_path: &Option, -) -> std::result::Result { - let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); - let filename = match config_path { - Some(config_path) => root.join(config_path), - None => root.join(CONFIG_FILE_NAME), - }; - - if filename.exists() { - fs::read_to_string(&filename) - .map_err(|_| format!("unable to read {:?}", filename.to_str().unwrap()).into()) - } else if config_path.is_some() { - Err(format!("`{}` does not exist", root.display()).into()) - } else { - Ok("".to_string()) - } -} - -fn str_set(vals: &[T]) -> HashSet -where - T: ToString, -{ - vals.iter().map(|s| s.to_string()).collect() -} - -#[allow(clippy::match_wild_err_arm)] -pub(crate) fn get_template_source(tpl_path: &Path) -> std::result::Result { - match fs::read_to_string(tpl_path) { - Err(_) => Err(format!( - "unable to open template file '{}'", - tpl_path.to_str().unwrap() - ) - .into()), - Ok(mut source) => { - if source.ends_with('\n') { - let _ = source.pop(); - } - Ok(source) - } - } -} - -static CONFIG_FILE_NAME: &str = "askama.toml"; -static DEFAULT_SYNTAX_NAME: &str = "default"; -static DEFAULT_ESCAPERS: &[(&[&str], &str)] = &[ - (&["html", "htm", "xml"], "::askama::Html"), - (&["md", "none", "txt", "yml", ""], "::askama::Text"), - (&["j2", "jinja", "jinja2"], "::askama::Html"), -]; - -#[cfg(test)] -#[allow(clippy::blacklisted_name)] -mod tests { - use std::env; - use std::path::{Path, PathBuf}; - - use super::*; - - #[test] - fn get_source() { - let path = Config::new("") - .and_then(|config| config.find_template("b.html", None)) - .unwrap(); - assert_eq!(get_template_source(&path).unwrap(), "bar"); - } - - #[test] - fn test_default_config() { - let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); - root.push("templates"); - let config = Config::new("").unwrap(); - assert_eq!(config.dirs, vec![root]); - } - - #[cfg(feature = "config")] - #[test] - fn test_config_dirs() { - let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); - root.push("tpl"); - let config = Config::new("[general]\ndirs = 
[\"tpl\"]").unwrap(); - assert_eq!(config.dirs, vec![root]); - } - - fn assert_eq_rooted(actual: &Path, expected: &str) { - let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); - root.push("templates"); - let mut inner = PathBuf::new(); - inner.push(expected); - assert_eq!(actual.strip_prefix(root).unwrap(), inner); - } - - #[test] - fn find_absolute() { - let config = Config::new("").unwrap(); - let root = config.find_template("a.html", None).unwrap(); - let path = config.find_template("sub/b.html", Some(&root)).unwrap(); - assert_eq_rooted(&path, "sub/b.html"); - } - - #[test] - #[should_panic] - fn find_relative_nonexistent() { - let config = Config::new("").unwrap(); - let root = config.find_template("a.html", None).unwrap(); - config.find_template("c.html", Some(&root)).unwrap(); - } - - #[test] - fn find_relative() { - let config = Config::new("").unwrap(); - let root = config.find_template("sub/b.html", None).unwrap(); - let path = config.find_template("c.html", Some(&root)).unwrap(); - assert_eq_rooted(&path, "sub/c.html"); - } - - #[test] - fn find_relative_sub() { - let config = Config::new("").unwrap(); - let root = config.find_template("sub/b.html", None).unwrap(); - let path = config.find_template("sub1/d.html", Some(&root)).unwrap(); - assert_eq_rooted(&path, "sub/sub1/d.html"); - } - - #[cfg(feature = "config")] - #[test] - fn add_syntax() { - let raw_config = r#" - [general] - default_syntax = "foo" - - [[syntax]] - name = "foo" - block_start = "{<" - - [[syntax]] - name = "bar" - expr_start = "{!" - "#; - - let default_syntax = Syntax::default(); - let config = Config::new(raw_config).unwrap(); - assert_eq!(config.default_syntax, "foo"); - - let foo = config.syntaxes.get("foo").unwrap(); - assert_eq!(foo.block_start, "{<"); - assert_eq!(foo.block_end, default_syntax.block_end); - assert_eq!(foo.expr_start, default_syntax.expr_start); - assert_eq!(foo.expr_end, default_syntax.expr_end); - assert_eq!(foo.comment_start, default_syntax.comment_start); - assert_eq!(foo.comment_end, default_syntax.comment_end); - - let bar = config.syntaxes.get("bar").unwrap(); - assert_eq!(bar.block_start, default_syntax.block_start); - assert_eq!(bar.block_end, default_syntax.block_end); - assert_eq!(bar.expr_start, "{!"); - assert_eq!(bar.expr_end, default_syntax.expr_end); - assert_eq!(bar.comment_start, default_syntax.comment_start); - assert_eq!(bar.comment_end, default_syntax.comment_end); - } - - #[cfg(feature = "config")] - #[test] - fn add_syntax_two() { - let raw_config = r#" - syntax = [{ name = "foo", block_start = "{<" }, - { name = "bar", expr_start = "{!" 
} ] - - [general] - default_syntax = "foo" - "#; - - let default_syntax = Syntax::default(); - let config = Config::new(raw_config).unwrap(); - assert_eq!(config.default_syntax, "foo"); - - let foo = config.syntaxes.get("foo").unwrap(); - assert_eq!(foo.block_start, "{<"); - assert_eq!(foo.block_end, default_syntax.block_end); - assert_eq!(foo.expr_start, default_syntax.expr_start); - assert_eq!(foo.expr_end, default_syntax.expr_end); - assert_eq!(foo.comment_start, default_syntax.comment_start); - assert_eq!(foo.comment_end, default_syntax.comment_end); - - let bar = config.syntaxes.get("bar").unwrap(); - assert_eq!(bar.block_start, default_syntax.block_start); - assert_eq!(bar.block_end, default_syntax.block_end); - assert_eq!(bar.expr_start, "{!"); - assert_eq!(bar.expr_end, default_syntax.expr_end); - assert_eq!(bar.comment_start, default_syntax.comment_start); - assert_eq!(bar.comment_end, default_syntax.comment_end); - } - - #[cfg(feature = "toml")] - #[should_panic] - #[test] - fn use_default_at_syntax_name() { - let raw_config = r#" - syntax = [{ name = "default" }] - "#; - - let _config = Config::new(raw_config).unwrap(); - } - - #[cfg(feature = "toml")] - #[should_panic] - #[test] - fn duplicated_syntax_name_on_list() { - let raw_config = r#" - syntax = [{ name = "foo", block_start = "~<" }, - { name = "foo", block_start = "%%" } ] - "#; - - let _config = Config::new(raw_config).unwrap(); - } - - #[cfg(feature = "toml")] - #[should_panic] - #[test] - fn is_not_exist_default_syntax() { - let raw_config = r#" - [general] - default_syntax = "foo" - "#; - - let _config = Config::new(raw_config).unwrap(); - } - - #[cfg(feature = "config")] - #[test] - fn escape_modes() { - let config = Config::new( - r#" - [[escaper]] - path = "::askama::Js" - extensions = ["js"] - "#, - ) - .unwrap(); - assert_eq!( - config.escapers, - vec![ - (str_set(&["js"]), "::askama::Js".into()), - (str_set(&["html", "htm", "xml"]), "::askama::Html".into()), - ( - str_set(&["md", "none", "txt", "yml", ""]), - "::askama::Text".into() - ), - (str_set(&["j2", "jinja", "jinja2"]), "::askama::Html".into()), - ] - ); - } - - #[test] - fn test_whitespace_parsing() { - let config = Config::new( - r#" - [general] - whitespace = "suppress" - "#, - ) - .unwrap(); - assert_eq!(config.whitespace, WhitespaceHandling::Suppress); - - let config = Config::new(r#""#).unwrap(); - assert_eq!(config.whitespace, WhitespaceHandling::Preserve); - - let config = Config::new( - r#" - [general] - whitespace = "preserve" - "#, - ) - .unwrap(); - assert_eq!(config.whitespace, WhitespaceHandling::Preserve); - - let config = Config::new( - r#" - [general] - whitespace = "minimize" - "#, - ) - .unwrap(); - assert_eq!(config.whitespace, WhitespaceHandling::Minimize); - } -} diff --git a/askama_shared/src/filters/mod.rs b/askama_shared/src/filters/mod.rs index 6437ce0..1782602 100644 --- a/askama_shared/src/filters/mod.rs +++ b/askama_shared/src/filters/mod.rs @@ -43,42 +43,6 @@ const URLENCODE_STRICT_SET: &AsciiSet = &NON_ALPHANUMERIC // Same as URLENCODE_STRICT_SET, but preserves forward slashes for encoding paths const URLENCODE_SET: &AsciiSet = &URLENCODE_STRICT_SET.remove(b'/'); -// This is used by the code generator to decide whether a named filter is part of -// Askama or should refer to a local `filters` module. It should contain all the -// filters shipped with Askama, even the optional ones (since optional inclusion -// in the const vector based on features seems impossible right now). 
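Side note on the hunk below: the removed comment describes how filter names are resolved during code generation. Names in BUILT_IN_FILTERS map to ::askama::filters::*, and every other name is assumed to come from a `filters` module defined next to the deriving type. A minimal sketch of that user-side contract, where the filter name `myfilter` and its body are illustrative and not part of this patch:

    use askama::Template;

    #[derive(Template)]
    #[template(source = "{{ name|myfilter }}", ext = "txt")]
    struct Hello<'a> {
        name: &'a str,
    }

    // Names not listed in BUILT_IN_FILTERS are resolved against a `filters`
    // module in scope; filters take the value and return askama::Result.
    mod filters {
        pub fn myfilter<T: std::fmt::Display>(s: T) -> askama::Result<String> {
            Ok(s.to_string().to_uppercase())
        }
    }

    fn main() {
        let t = Hello { name: "world" };
        assert_eq!(t.render().unwrap(), "WORLD");
    }
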
-pub const BUILT_IN_FILTERS: &[&str] = &[ - "abs", - "capitalize", - "center", - "e", - "escape", - "filesizeformat", - "fmt", - "format", - "indent", - "into_f64", - "into_isize", - "join", - "linebreaks", - "linebreaksbr", - "paragraphbreaks", - "lower", - "lowercase", - "safe", - "trim", - "truncate", - "upper", - "uppercase", - "urlencode", - "urlencode_strict", - "wordcount", - // optional features, reserve the names anyway: - "json", - "markdown", - "yaml", -]; - /// Marks a string (or other `Display` type) as safe /// /// Use this is you want to allow markup in an expression, or if you know diff --git a/askama_shared/src/generator.rs b/askama_shared/src/generator.rs deleted file mode 100644 index ea95bd3..0000000 --- a/askama_shared/src/generator.rs +++ /dev/null @@ -1,2142 +0,0 @@ -use crate::config::{get_template_source, read_config_file, Config, WhitespaceHandling}; -use crate::heritage::{Context, Heritage}; -use crate::input::{Print, Source, TemplateInput}; -use crate::parser::{parse, Cond, CondTest, Expr, Loop, Node, Target, When, Whitespace, Ws}; -use crate::{filters, CompileError}; - -use proc_macro2::TokenStream; -use quote::{quote, ToTokens}; - -use std::collections::HashMap; -use std::path::{Path, PathBuf}; -use std::{cmp, hash, mem, str}; - -/// The actual implementation for askama_derive::Template -#[doc(hidden)] -pub fn derive_template(input: TokenStream) -> TokenStream { - let ast: syn::DeriveInput = syn::parse2(input).unwrap(); - match build_template(&ast) { - Ok(source) => source.parse().unwrap(), - Err(e) => e.into_compile_error(), - } -} - -/// Takes a `syn::DeriveInput` and generates source code for it -/// -/// Reads the metadata from the `template()` attribute to get the template -/// metadata, then fetches the source from the filesystem. The source is -/// parsed, and the parse tree is fed to the code generator. Will print -/// the parse tree and/or generated source according to the `print` key's -/// value as passed to the `template()` attribute. 
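The pipeline described in this doc comment (read the config, resolve and parse the template source, then generate code) is what backs the user-facing derive. A hedged usage sketch with an illustrative struct and template text; `print = "all"` asks this function to dump both the parse tree and the generated code at build time:

    use askama::Template;

    // `source`/`ext` inline the template; `path = "hello.html"` would instead be
    // resolved through Config::find_template and the configured template dirs.
    // `print` may be "none", "ast", "code" or "all" and only affects build output.
    #[derive(Template)]
    #[template(source = "Hello, {{ name }}!", ext = "txt", print = "all")]
    struct HelloTemplate<'a> {
        name: &'a str,
    }

    fn main() {
        let hello = HelloTemplate { name: "world" };
        assert_eq!(hello.render().unwrap(), "Hello, world!");
    }
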
-fn build_template(ast: &syn::DeriveInput) -> Result { - let template_args = TemplateArgs::new(ast)?; - let config_toml = read_config_file(&template_args.config_path)?; - let config = Config::new(&config_toml)?; - let input = TemplateInput::new(ast, &config, template_args)?; - let source: String = match input.source { - Source::Source(ref s) => s.clone(), - Source::Path(_) => get_template_source(&input.path)?, - }; - - let mut sources = HashMap::new(); - find_used_templates(&input, &mut sources, source)?; - - let mut parsed = HashMap::new(); - for (path, src) in &sources { - parsed.insert(path.as_path(), parse(src, input.syntax)?); - } - - let mut contexts = HashMap::new(); - for (path, nodes) in &parsed { - contexts.insert(*path, Context::new(input.config, path, nodes)?); - } - - let ctx = &contexts[input.path.as_path()]; - let heritage = if !ctx.blocks.is_empty() || ctx.extends.is_some() { - Some(Heritage::new(ctx, &contexts)) - } else { - None - }; - - if input.print == Print::Ast || input.print == Print::All { - eprintln!("{:?}", parsed[input.path.as_path()]); - } - - let code = Generator::new( - &input, - &contexts, - heritage.as_ref(), - MapChain::new(), - config.whitespace, - ) - .build(&contexts[input.path.as_path()])?; - if input.print == Print::Code || input.print == Print::All { - eprintln!("{}", code); - } - Ok(code) -} - -#[derive(Default)] -pub(crate) struct TemplateArgs { - pub(crate) source: Option, - pub(crate) print: Print, - pub(crate) escaping: Option, - pub(crate) ext: Option, - pub(crate) syntax: Option, - pub(crate) config_path: Option, -} - -impl TemplateArgs { - fn new(ast: &'_ syn::DeriveInput) -> Result { - // Check that an attribute called `template()` exists once and that it is - // the proper type (list). - let mut template_args = None; - for attr in &ast.attrs { - let ident = match attr.path.get_ident() { - Some(ident) => ident, - None => continue, - }; - - if ident == "template" { - if template_args.is_some() { - return Err("duplicated 'template' attribute".into()); - } - - match attr.parse_meta() { - Ok(syn::Meta::List(syn::MetaList { nested, .. })) => { - template_args = Some(nested); - } - Ok(_) => return Err("'template' attribute must be a list".into()), - Err(e) => return Err(format!("unable to parse attribute: {}", e).into()), - } - } - } - let template_args = - template_args.ok_or_else(|| CompileError::from("no attribute 'template' found"))?; - - let mut args = Self::default(); - // Loop over the meta attributes and find everything that we - // understand. Return a CompileError if something is not right. - // `source` contains an enum that can represent `path` or `source`. 
- for item in template_args { - let pair = match item { - syn::NestedMeta::Meta(syn::Meta::NameValue(ref pair)) => pair, - _ => { - return Err(format!( - "unsupported attribute argument {:?}", - item.to_token_stream() - ) - .into()) - } - }; - let ident = match pair.path.get_ident() { - Some(ident) => ident, - None => unreachable!("not possible in syn::Meta::NameValue(…)"), - }; - - if ident == "path" { - if let syn::Lit::Str(ref s) = pair.lit { - if args.source.is_some() { - return Err("must specify 'source' or 'path', not both".into()); - } - args.source = Some(Source::Path(s.value())); - } else { - return Err("template path must be string literal".into()); - } - } else if ident == "source" { - if let syn::Lit::Str(ref s) = pair.lit { - if args.source.is_some() { - return Err("must specify 'source' or 'path', not both".into()); - } - args.source = Some(Source::Source(s.value())); - } else { - return Err("template source must be string literal".into()); - } - } else if ident == "print" { - if let syn::Lit::Str(ref s) = pair.lit { - args.print = s.value().parse()?; - } else { - return Err("print value must be string literal".into()); - } - } else if ident == "escape" { - if let syn::Lit::Str(ref s) = pair.lit { - args.escaping = Some(s.value()); - } else { - return Err("escape value must be string literal".into()); - } - } else if ident == "ext" { - if let syn::Lit::Str(ref s) = pair.lit { - args.ext = Some(s.value()); - } else { - return Err("ext value must be string literal".into()); - } - } else if ident == "syntax" { - if let syn::Lit::Str(ref s) = pair.lit { - args.syntax = Some(s.value()) - } else { - return Err("syntax value must be string literal".into()); - } - } else if ident == "config" { - if let syn::Lit::Str(ref s) = pair.lit { - args.config_path = Some(s.value()) - } else { - return Err("config value must be string literal".into()); - } - } else { - return Err(format!("unsupported attribute key {:?} found", ident).into()); - } - } - - Ok(args) - } -} - -fn find_used_templates( - input: &TemplateInput<'_>, - map: &mut HashMap, - source: String, -) -> Result<(), CompileError> { - let mut dependency_graph = Vec::new(); - let mut check = vec![(input.path.clone(), source)]; - while let Some((path, source)) = check.pop() { - for n in parse(&source, input.syntax)? 
{ - match n { - Node::Extends(Expr::StrLit(extends)) => { - let extends = input.config.find_template(extends, Some(&path))?; - let dependency_path = (path.clone(), extends.clone()); - if dependency_graph.contains(&dependency_path) { - return Err(format!( - "cyclic dependecy in graph {:#?}", - dependency_graph - .iter() - .map(|e| format!("{:#?} --> {:#?}", e.0, e.1)) - .collect::>() - ) - .into()); - } - dependency_graph.push(dependency_path); - let source = get_template_source(&extends)?; - check.push((extends, source)); - } - Node::Import(_, import, _) => { - let import = input.config.find_template(import, Some(&path))?; - let source = get_template_source(&import)?; - check.push((import, source)); - } - _ => {} - } - } - map.insert(path, source); - } - Ok(()) -} -struct Generator<'a, S: std::hash::BuildHasher> { - // The template input state: original struct AST and attributes - input: &'a TemplateInput<'a>, - // All contexts, keyed by the package-relative template path - contexts: &'a HashMap<&'a Path, Context<'a>, S>, - // The heritage contains references to blocks and their ancestry - heritage: Option<&'a Heritage<'a>>, - // Variables accessible directly from the current scope (not redirected to context) - locals: MapChain<'a, &'a str, LocalMeta>, - // Suffix whitespace from the previous literal. Will be flushed to the - // output buffer unless suppressed by whitespace suppression on the next - // non-literal. - next_ws: Option<&'a str>, - // Whitespace suppression from the previous non-literal. Will be used to - // determine whether to flush prefix whitespace from the next literal. - skip_ws: WhitespaceHandling, - // If currently in a block, this will contain the name of a potential parent block - super_block: Option<(&'a str, usize)>, - // buffer for writable - buf_writable: Vec>, - // Counter for write! hash named arguments - named: usize, - // If set to `suppress`, the whitespace characters will be removed by default unless `+` is - // used. - whitespace: WhitespaceHandling, -} - -impl<'a, S: std::hash::BuildHasher> Generator<'a, S> { - fn new<'n>( - input: &'n TemplateInput<'_>, - contexts: &'n HashMap<&'n Path, Context<'n>, S>, - heritage: Option<&'n Heritage<'_>>, - locals: MapChain<'n, &'n str, LocalMeta>, - whitespace: WhitespaceHandling, - ) -> Generator<'n, S> { - Generator { - input, - contexts, - heritage, - locals, - next_ws: None, - skip_ws: WhitespaceHandling::Preserve, - super_block: None, - buf_writable: vec![], - named: 0, - whitespace, - } - } - - fn child(&mut self) -> Generator<'_, S> { - let locals = MapChain::with_parent(&self.locals); - Self::new( - self.input, - self.contexts, - self.heritage, - locals, - self.whitespace, - ) - } - - // Takes a Context and generates the relevant implementations. 
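For orientation, this is roughly what the implementations generated by build() give the deriving type: a Template impl (render/render_into plus the EXTENSION, SIZE_HINT and MIME_TYPE consts written in impl_template below) and a Display impl that forwards to render_into. A small hedged usage sketch, with illustrative names:

    use askama::Template;

    #[derive(Template)]
    #[template(source = "Hi {{ name }}", ext = "html")]
    struct Greeting {
        name: &'static str,
    }

    fn main() {
        let g = Greeting { name: "Rust" };
        // From the generated Template impl:
        assert_eq!(g.render().unwrap(), "Hi Rust");
        assert_eq!(Greeting::EXTENSION, Some("html"));
        println!("mime: {}, size hint: {}", Greeting::MIME_TYPE, Greeting::SIZE_HINT);
        // From the generated Display impl, which forwards to render_into:
        assert_eq!(g.to_string(), "Hi Rust");
    }
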
- fn build(mut self, ctx: &'a Context<'_>) -> Result { - let mut buf = Buffer::new(0); - if !ctx.blocks.is_empty() { - if let Some(parent) = self.input.parent { - self.deref_to_parent(&mut buf, parent)?; - } - }; - - self.impl_template(ctx, &mut buf)?; - self.impl_display(&mut buf)?; - - #[cfg(feature = "actix-web")] - self.impl_actix_web_responder(&mut buf)?; - #[cfg(feature = "axum")] - self.impl_axum_into_response(&mut buf)?; - #[cfg(feature = "gotham")] - self.impl_gotham_into_response(&mut buf)?; - #[cfg(feature = "mendes")] - self.impl_mendes_responder(&mut buf)?; - #[cfg(feature = "rocket")] - self.impl_rocket_responder(&mut buf)?; - #[cfg(feature = "tide")] - self.impl_tide_integrations(&mut buf)?; - #[cfg(feature = "warp")] - self.impl_warp_reply(&mut buf)?; - - Ok(buf.buf) - } - - // Implement `Template` for the given context struct. - fn impl_template( - &mut self, - ctx: &'a Context<'_>, - buf: &mut Buffer, - ) -> Result<(), CompileError> { - self.write_header(buf, "::askama::Template", None)?; - buf.writeln( - "fn render_into(&self, writer: &mut (impl ::std::fmt::Write + ?Sized)) -> \ - ::askama::Result<()> {", - )?; - - // Make sure the compiler understands that the generated code depends on the template files. - for path in self.contexts.keys() { - // Skip the fake path of templates defined in rust source. - let path_is_valid = match self.input.source { - Source::Path(_) => true, - Source::Source(_) => path != &self.input.path, - }; - if path_is_valid { - let path = path.to_str().unwrap(); - buf.writeln( - "e! { - include_bytes!(#path); - } - .to_string(), - )?; - } - } - - let size_hint = if let Some(heritage) = self.heritage { - self.handle(heritage.root, heritage.root.nodes, buf, AstLevel::Top) - } else { - self.handle(ctx, ctx.nodes, buf, AstLevel::Top) - }?; - - self.flush_ws(Ws(None, None)); - buf.writeln("::askama::Result::Ok(())")?; - buf.writeln("}")?; - - buf.writeln("const EXTENSION: ::std::option::Option<&'static ::std::primitive::str> = ")?; - buf.writeln(&format!("{:?}", self.input.extension()))?; - buf.writeln(";")?; - - buf.writeln("const SIZE_HINT: ::std::primitive::usize = ")?; - buf.writeln(&format!("{}", size_hint))?; - buf.writeln(";")?; - - buf.writeln("const MIME_TYPE: &'static ::std::primitive::str = ")?; - buf.writeln(&format!("{:?}", &self.input.mime_type))?; - buf.writeln(";")?; - - buf.writeln("}")?; - Ok(()) - } - - // Implement `Deref` for an inheriting context struct. - fn deref_to_parent( - &mut self, - buf: &mut Buffer, - parent_type: &syn::Type, - ) -> Result<(), CompileError> { - self.write_header(buf, "::std::ops::Deref", None)?; - buf.writeln(&format!( - "type Target = {};", - parent_type.into_token_stream() - ))?; - buf.writeln("#[inline]")?; - buf.writeln("fn deref(&self) -> &Self::Target {")?; - buf.writeln("&self._parent")?; - buf.writeln("}")?; - buf.writeln("}") - } - - // Implement `Display` for the given context struct. - fn impl_display(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { - self.write_header(buf, "::std::fmt::Display", None)?; - buf.writeln("#[inline]")?; - buf.writeln("fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {")?; - buf.writeln("::askama::Template::render_into(self, f).map_err(|_| ::std::fmt::Error {})")?; - buf.writeln("}")?; - buf.writeln("}") - } - - // Implement Actix-web's `Responder`. 
- #[cfg(feature = "actix-web")] - fn impl_actix_web_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { - self.write_header(buf, "::askama_actix::actix_web::Responder", None)?; - buf.writeln("type Body = ::askama_actix::actix_web::body::BoxBody;")?; - buf.writeln("#[inline]")?; - buf.writeln( - "fn respond_to(self, _req: &::askama_actix::actix_web::HttpRequest) \ - -> ::askama_actix::actix_web::HttpResponse {", - )?; - buf.writeln("::to_response(&self)")?; - buf.writeln("}")?; - buf.writeln("}") - } - - // Implement Axum's `IntoResponse`. - #[cfg(feature = "axum")] - fn impl_axum_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { - self.write_header(buf, "::askama_axum::IntoResponse", None)?; - buf.writeln("#[inline]")?; - buf.writeln( - "fn into_response(self)\ - -> ::askama_axum::Response {", - )?; - let ext = self.input.extension().unwrap_or("txt"); - buf.writeln(&format!("::askama_axum::into_response(&self, {:?})", ext))?; - buf.writeln("}")?; - buf.writeln("}") - } - - // Implement gotham's `IntoResponse`. - #[cfg(feature = "gotham")] - fn impl_gotham_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { - self.write_header(buf, "::askama_gotham::IntoResponse", None)?; - buf.writeln("#[inline]")?; - buf.writeln( - "fn into_response(self, _state: &::askama_gotham::State)\ - -> ::askama_gotham::Response<::askama_gotham::Body> {", - )?; - let ext = self.input.extension().unwrap_or("txt"); - buf.writeln(&format!("::askama_gotham::respond(&self, {:?})", ext))?; - buf.writeln("}")?; - buf.writeln("}") - } - - // Implement mendes' `Responder`. - #[cfg(feature = "mendes")] - fn impl_mendes_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { - let param = syn::parse_str("A: ::mendes::Application").unwrap(); - - let mut generics = self.input.ast.generics.clone(); - generics.params.push(param); - let (_, orig_ty_generics, _) = self.input.ast.generics.split_for_impl(); - let (impl_generics, _, where_clause) = generics.split_for_impl(); - - let mut where_clause = match where_clause { - Some(clause) => clause.clone(), - None => syn::WhereClause { - where_token: syn::Token![where](proc_macro2::Span::call_site()), - predicates: syn::punctuated::Punctuated::new(), - }, - }; - - where_clause - .predicates - .push(syn::parse_str("A::ResponseBody: From").unwrap()); - where_clause - .predicates - .push(syn::parse_str("A::Error: From<::askama_mendes::Error>").unwrap()); - - buf.writeln( - format!( - "{} {} for {} {} {{", - quote!(impl#impl_generics), - "::mendes::application::IntoResponse", - self.input.ast.ident, - quote!(#orig_ty_generics #where_clause), - ) - .as_ref(), - )?; - - buf.writeln( - "fn into_response(self, app: &A, req: &::mendes::http::request::Parts) \ - -> ::mendes::http::Response {", - )?; - - buf.writeln(&format!( - "::askama_mendes::into_response(app, req, &self, {:?})", - self.input.extension() - ))?; - buf.writeln("}")?; - buf.writeln("}")?; - Ok(()) - } - - // Implement Rocket's `Responder`. 
- #[cfg(feature = "rocket")] - fn impl_rocket_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { - let lifetime = syn::Lifetime::new("'askama", proc_macro2::Span::call_site()); - let param = syn::GenericParam::Lifetime(syn::LifetimeDef::new(lifetime)); - self.write_header( - buf, - "::askama_rocket::Responder<'askama>", - Some(vec![param]), - )?; - - buf.writeln("#[inline]")?; - buf.writeln( - "fn respond_to(self, _: &::askama_rocket::Request) \ - -> ::askama_rocket::Result<'askama> {", - )?; - let ext = self.input.extension().unwrap_or("txt"); - buf.writeln(&format!("::askama_rocket::respond(&self, {:?})", ext))?; - - buf.writeln("}")?; - buf.writeln("}")?; - Ok(()) - } - - #[cfg(feature = "tide")] - fn impl_tide_integrations(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { - let ext = self.input.extension().unwrap_or("txt"); - - self.write_header( - buf, - "::std::convert::TryInto<::askama_tide::tide::Body>", - None, - )?; - buf.writeln( - "type Error = ::askama_tide::askama::Error;\n\ - #[inline]\n\ - fn try_into(self) -> ::askama_tide::askama::Result<::askama_tide::tide::Body> {", - )?; - buf.writeln(&format!("::askama_tide::try_into_body(&self, {:?})", &ext))?; - buf.writeln("}")?; - buf.writeln("}")?; - - buf.writeln("#[allow(clippy::from_over_into)]")?; - self.write_header(buf, "Into<::askama_tide::tide::Response>", None)?; - buf.writeln("#[inline]")?; - buf.writeln("fn into(self) -> ::askama_tide::tide::Response {")?; - buf.writeln(&format!("::askama_tide::into_response(&self, {:?})", ext))?; - buf.writeln("}\n}") - } - - #[cfg(feature = "warp")] - fn impl_warp_reply(&mut self, buf: &mut Buffer) -> Result<(), CompileError> { - self.write_header(buf, "::askama_warp::warp::reply::Reply", None)?; - buf.writeln("#[inline]")?; - buf.writeln("fn into_response(self) -> ::askama_warp::warp::reply::Response {")?; - let ext = self.input.extension().unwrap_or("txt"); - buf.writeln(&format!("::askama_warp::reply(&self, {:?})", ext))?; - buf.writeln("}")?; - buf.writeln("}") - } - - // Writes header for the `impl` for `TraitFromPathName` or `Template` - // for the given context struct. 
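write_header is where the deriving type's generics get spliced into every impl header above. A simplified standalone sketch of that splicing, assuming the syn 1.x / quote 1.x dependencies this patch adds to askama_derive; the struct literal and the printed shape are illustrative, and the real function also merges in extra params such as the Rocket lifetime:

    use quote::quote;

    fn main() {
        let ast: syn::DeriveInput = syn::parse_str(
            "struct HelloTemplate<'a, T: std::fmt::Display> { name: &'a T }",
        )
        .unwrap();
        let ident = &ast.ident;
        let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
        // Roughly what write_header emits for `::askama::Template`, token
        // spacing aside:
        //   impl<'a, T: std::fmt::Display> ::askama::Template
        //       for HelloTemplate<'a, T>
        let header = quote! {
            impl #impl_generics ::askama::Template for #ident #ty_generics #where_clause
        };
        println!("{} {{", header);
    }
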
- fn write_header( - &mut self, - buf: &mut Buffer, - target: &str, - params: Option>, - ) -> Result<(), CompileError> { - let mut generics = self.input.ast.generics.clone(); - if let Some(params) = params { - for param in params { - generics.params.push(param); - } - } - let (_, orig_ty_generics, _) = self.input.ast.generics.split_for_impl(); - let (impl_generics, _, where_clause) = generics.split_for_impl(); - buf.writeln( - format!( - "{} {} for {}{} {{", - quote!(impl#impl_generics), - target, - self.input.ast.ident, - quote!(#orig_ty_generics #where_clause), - ) - .as_ref(), - ) - } - - /* Helper methods for handling node types */ - - fn handle( - &mut self, - ctx: &'a Context<'_>, - nodes: &'a [Node<'_>], - buf: &mut Buffer, - level: AstLevel, - ) -> Result { - let mut size_hint = 0; - for n in nodes { - match *n { - Node::Lit(lws, val, rws) => { - self.visit_lit(lws, val, rws); - } - Node::Comment(ws) => { - self.write_comment(ws); - } - Node::Expr(ws, ref val) => { - self.write_expr(ws, val); - } - Node::LetDecl(ws, ref var) => { - self.write_let_decl(buf, ws, var)?; - } - Node::Let(ws, ref var, ref val) => { - self.write_let(buf, ws, var, val)?; - } - Node::Cond(ref conds, ws) => { - self.write_cond(ctx, buf, conds, ws)?; - } - Node::Match(ws1, ref expr, ref arms, ws2) => { - self.write_match(ctx, buf, ws1, expr, arms, ws2)?; - } - Node::Loop(ref loop_block) => { - self.write_loop(ctx, buf, loop_block)?; - } - Node::BlockDef(ws1, name, _, ws2) => { - self.write_block(buf, Some(name), Ws(ws1.0, ws2.1))?; - } - Node::Include(ws, path) => { - size_hint += self.handle_include(ctx, buf, ws, path)?; - } - Node::Call(ws, scope, name, ref args) => { - size_hint += self.write_call(ctx, buf, ws, scope, name, args)?; - } - Node::Macro(_, ref m) => { - if level != AstLevel::Top { - return Err("macro blocks only allowed at the top level".into()); - } - self.flush_ws(m.ws1); - self.prepare_ws(m.ws2); - } - Node::Raw(ws1, lws, val, rws, ws2) => { - self.handle_ws(ws1); - self.visit_lit(lws, val, rws); - self.handle_ws(ws2); - } - Node::Import(ws, _, _) => { - if level != AstLevel::Top { - return Err("import blocks only allowed at the top level".into()); - } - self.handle_ws(ws); - } - Node::Extends(_) => { - if level != AstLevel::Top { - return Err("extend blocks only allowed at the top level".into()); - } - // No whitespace handling: child template top-level is not used, - // except for the blocks defined in it. 
- } - Node::Break(ws) => { - self.handle_ws(ws); - self.write_buf_writable(buf)?; - buf.writeln("break;")?; - } - Node::Continue(ws) => { - self.handle_ws(ws); - self.write_buf_writable(buf)?; - buf.writeln("continue;")?; - } - } - } - - if AstLevel::Top == level { - size_hint += self.write_buf_writable(buf)?; - } - Ok(size_hint) - } - - fn write_cond( - &mut self, - ctx: &'a Context<'_>, - buf: &mut Buffer, - conds: &'a [Cond<'_>], - ws: Ws, - ) -> Result { - let mut flushed = 0; - let mut arm_sizes = Vec::new(); - let mut has_else = false; - for (i, &(cws, ref cond, ref nodes)) in conds.iter().enumerate() { - self.handle_ws(cws); - flushed += self.write_buf_writable(buf)?; - if i > 0 { - self.locals.pop(); - } - - self.locals.push(); - let mut arm_size = 0; - if let Some(CondTest { target, expr }) = cond { - if i == 0 { - buf.write("if "); - } else { - buf.dedent()?; - buf.write("} else if "); - } - - if let Some(target) = target { - let mut expr_buf = Buffer::new(0); - self.visit_expr(&mut expr_buf, expr)?; - buf.write("let "); - self.visit_target(buf, true, true, target); - buf.write(" = &("); - buf.write(&expr_buf.buf); - buf.write(")"); - } else { - // The following syntax `*(&(...) as &bool)` is used to - // trigger Rust's automatic dereferencing, to coerce - // e.g. `&&&&&bool` to `bool`. First `&(...) as &bool` - // coerces e.g. `&&&bool` to `&bool`. Then `*(&bool)` - // finally dereferences it to `bool`. - buf.write("*(&("); - let expr_code = self.visit_expr_root(expr)?; - buf.write(&expr_code); - buf.write(") as &bool)"); - } - } else { - buf.dedent()?; - buf.write("} else"); - has_else = true; - } - - buf.writeln(" {")?; - - arm_size += self.handle(ctx, nodes, buf, AstLevel::Nested)?; - arm_sizes.push(arm_size); - } - self.handle_ws(ws); - flushed += self.write_buf_writable(buf)?; - buf.writeln("}")?; - - self.locals.pop(); - - if !has_else { - arm_sizes.push(0); - } - Ok(flushed + median(&mut arm_sizes)) - } - - #[allow(clippy::too_many_arguments)] - fn write_match( - &mut self, - ctx: &'a Context<'_>, - buf: &mut Buffer, - ws1: Ws, - expr: &Expr<'_>, - arms: &'a [When<'_>], - ws2: Ws, - ) -> Result { - self.flush_ws(ws1); - let flushed = self.write_buf_writable(buf)?; - let mut arm_sizes = Vec::new(); - - let expr_code = self.visit_expr_root(expr)?; - buf.writeln(&format!("match &{} {{", expr_code))?; - - let mut arm_size = 0; - for (i, arm) in arms.iter().enumerate() { - let &(ws, ref target, ref body) = arm; - self.handle_ws(ws); - - if i > 0 { - arm_sizes.push(arm_size + self.write_buf_writable(buf)?); - - buf.writeln("}")?; - self.locals.pop(); - } - - self.locals.push(); - self.visit_target(buf, true, true, target); - buf.writeln(" => {")?; - - arm_size = self.handle(ctx, body, buf, AstLevel::Nested)?; - } - - self.handle_ws(ws2); - arm_sizes.push(arm_size + self.write_buf_writable(buf)?); - buf.writeln("}")?; - self.locals.pop(); - - buf.writeln("}")?; - - Ok(flushed + median(&mut arm_sizes)) - } - - #[allow(clippy::too_many_arguments)] - fn write_loop( - &mut self, - ctx: &'a Context<'_>, - buf: &mut Buffer, - loop_block: &'a Loop<'_>, - ) -> Result { - self.handle_ws(loop_block.ws1); - self.locals.push(); - - let expr_code = self.visit_expr_root(&loop_block.iter)?; - - let flushed = self.write_buf_writable(buf)?; - buf.writeln("{")?; - buf.writeln("let mut _did_loop = false;")?; - match loop_block.iter { - Expr::Range(_, _, _) => buf.writeln(&format!("let _iter = {};", expr_code)), - Expr::Array(..) 
=> buf.writeln(&format!("let _iter = {}.iter();", expr_code)), - // If `iter` is a call then we assume it's something that returns - // an iterator. If not then the user can explicitly add the needed - // call without issues. - Expr::Call(..) | Expr::Index(..) => { - buf.writeln(&format!("let _iter = ({}).into_iter();", expr_code)) - } - // If accessing `self` then it most likely needs to be - // borrowed, to prevent an attempt of moving. - _ if expr_code.starts_with("self.") => { - buf.writeln(&format!("let _iter = (&{}).into_iter();", expr_code)) - } - // If accessing a field then it most likely needs to be - // borrowed, to prevent an attempt of moving. - Expr::Attr(..) => buf.writeln(&format!("let _iter = (&{}).into_iter();", expr_code)), - // Otherwise, we borrow `iter` assuming that it implements `IntoIterator`. - _ => buf.writeln(&format!("let _iter = ({}).into_iter();", expr_code)), - }?; - if let Some(cond) = &loop_block.cond { - self.locals.push(); - buf.write("let _iter = _iter.filter(|"); - self.visit_target(buf, true, true, &loop_block.var); - buf.write("| -> bool {"); - self.visit_expr(buf, cond)?; - buf.writeln("});")?; - self.locals.pop(); - } - - self.locals.push(); - buf.write("for ("); - self.visit_target(buf, true, true, &loop_block.var); - buf.writeln(", _loop_item) in ::askama::helpers::TemplateLoop::new(_iter) {")?; - - buf.writeln("_did_loop = true;")?; - let mut size_hint1 = self.handle(ctx, &loop_block.body, buf, AstLevel::Nested)?; - self.handle_ws(loop_block.ws2); - size_hint1 += self.write_buf_writable(buf)?; - self.locals.pop(); - buf.writeln("}")?; - - buf.writeln("if !_did_loop {")?; - self.locals.push(); - let mut size_hint2 = self.handle(ctx, &loop_block.else_block, buf, AstLevel::Nested)?; - self.handle_ws(loop_block.ws3); - size_hint2 += self.write_buf_writable(buf)?; - self.locals.pop(); - buf.writeln("}")?; - - buf.writeln("}")?; - - Ok(flushed + ((size_hint1 * 3) + size_hint2) / 2) - } - - fn write_call( - &mut self, - ctx: &'a Context<'_>, - buf: &mut Buffer, - ws: Ws, - scope: Option<&str>, - name: &str, - args: &[Expr<'_>], - ) -> Result { - if name == "super" { - return self.write_block(buf, None, ws); - } - - let (def, own_ctx) = match scope { - Some(s) => { - let path = ctx.imports.get(s).ok_or_else(|| { - CompileError::from(format!("no import found for scope {:?}", s)) - })?; - let mctx = self.contexts.get(path.as_path()).ok_or_else(|| { - CompileError::from(format!("context for {:?} not found", path)) - })?; - let def = mctx.macros.get(name).ok_or_else(|| { - CompileError::from(format!("macro {:?} not found in scope {:?}", name, s)) - })?; - (def, mctx) - } - None => { - let def = ctx - .macros - .get(name) - .ok_or_else(|| CompileError::from(format!("macro {:?} not found", name)))?; - (def, ctx) - } - }; - - self.flush_ws(ws); // Cannot handle_ws() here: whitespace from macro definition comes first - self.locals.push(); - self.write_buf_writable(buf)?; - buf.writeln("{")?; - self.prepare_ws(def.ws1); - - let mut names = Buffer::new(0); - let mut values = Buffer::new(0); - let mut is_first_variable = true; - for (i, arg) in def.args.iter().enumerate() { - let expr = args.get(i).ok_or_else(|| { - CompileError::from(format!("macro {:?} takes more than {} arguments", name, i)) - })?; - - match expr { - // If `expr` is already a form of variable then - // don't reintroduce a new variable. This is - // to avoid moving non-copyable values. 
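// Illustrative re-implementation (not the real `::askama::helpers::TemplateLoop`)
// of the adapter `write_loop` above iterates over: it pairs every item with loop
// metadata so templates can read `loop.index`, `loop.first` and `loop.last`.
use std::iter::Peekable;

struct LoopItem {
    index: usize,
    first: bool,
    last: bool,
}

struct TemplateLoop<I: Iterator> {
    iter: Peekable<I>,
    index: usize,
}

impl<I: Iterator> TemplateLoop<I> {
    fn new(iter: I) -> Self {
        Self { iter: iter.peekable(), index: 0 }
    }
}

impl<I: Iterator> Iterator for TemplateLoop<I> {
    type Item = (I::Item, LoopItem);

    fn next(&mut self) -> Option<Self::Item> {
        let item = self.iter.next()?;
        let index = self.index;
        self.index += 1;
        // `last` is known one step ahead thanks to the peekable iterator.
        let last = self.iter.peek().is_none();
        Some((item, LoopItem { index, first: index == 0, last }))
    }
}

fn main() {
    for (value, meta) in TemplateLoop::new(["a", "b", "c"].into_iter()) {
        println!("{}: {} (first={}, last={})", meta.index, value, meta.first, meta.last);
    }
}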
- Expr::Var(name) => { - let var = self.locals.resolve_or_self(name); - self.locals.insert(arg, LocalMeta::with_ref(var)); - } - Expr::Attr(obj, attr) => { - let mut attr_buf = Buffer::new(0); - self.visit_attr(&mut attr_buf, obj, attr)?; - - let var = self.locals.resolve(&attr_buf.buf).unwrap_or(attr_buf.buf); - self.locals.insert(arg, LocalMeta::with_ref(var)); - } - // Everything else still needs to become variables, - // to avoid having the same logic be executed - // multiple times, e.g. in the case of macro - // parameters being used multiple times. - _ => { - if is_first_variable { - is_first_variable = false - } else { - names.write(", "); - values.write(", "); - } - names.write(arg); - - values.write("("); - values.write(&self.visit_expr_root(expr)?); - values.write(")"); - self.locals.insert_with_default(arg); - } - } - } - - debug_assert_eq!(names.buf.is_empty(), values.buf.is_empty()); - if !names.buf.is_empty() { - buf.writeln(&format!("let ({}) = ({});", names.buf, values.buf))?; - } - - let mut size_hint = self.handle(own_ctx, &def.nodes, buf, AstLevel::Nested)?; - - self.flush_ws(def.ws2); - size_hint += self.write_buf_writable(buf)?; - buf.writeln("}")?; - self.locals.pop(); - self.prepare_ws(ws); - Ok(size_hint) - } - - fn handle_include( - &mut self, - ctx: &'a Context<'_>, - buf: &mut Buffer, - ws: Ws, - path: &str, - ) -> Result { - self.flush_ws(ws); - self.write_buf_writable(buf)?; - let path = self - .input - .config - .find_template(path, Some(&self.input.path))?; - let src = get_template_source(&path)?; - let nodes = parse(&src, self.input.syntax)?; - - // Make sure the compiler understands that the generated code depends on the template file. - { - let path = path.to_str().unwrap(); - buf.writeln( - "e! { - include_bytes!(#path); - } - .to_string(), - )?; - } - - let size_hint = { - // Since nodes must not outlive the Generator, we instantiate - // a nested Generator here to handle the include's nodes. 
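// Small sketch of the dependency-tracking trick in `handle_include` above, where a
// `quote! { include_bytes!(#path); }` block is written into the generated code so
// rustc re-runs the build whenever the included template file changes. Assumes the
// `quote` crate; the template path below is a hypothetical example.
use quote::quote;

fn dependency_marker(path: &str) -> String {
    quote! {
        include_bytes!(#path);
    }
    .to_string()
}

fn main() {
    println!("{}", dependency_marker("templates/index.html"));
}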
- let mut gen = self.child(); - let mut size_hint = gen.handle(ctx, &nodes, buf, AstLevel::Nested)?; - size_hint += gen.write_buf_writable(buf)?; - size_hint - }; - self.prepare_ws(ws); - Ok(size_hint) - } - - fn write_let_decl( - &mut self, - buf: &mut Buffer, - ws: Ws, - var: &'a Target<'_>, - ) -> Result<(), CompileError> { - self.handle_ws(ws); - self.write_buf_writable(buf)?; - buf.write("let "); - self.visit_target(buf, false, true, var); - buf.writeln(";") - } - - fn is_shadowing_variable(&self, var: &Target<'a>) -> Result { - match var { - Target::Name(name) => { - let name = normalize_identifier(name); - match self.locals.get(&name) { - // declares a new variable - None => Ok(false), - // an initialized variable gets shadowed - Some(meta) if meta.initialized => Ok(true), - // initializes a variable that was introduced in a LetDecl before - _ => Ok(false), - } - } - Target::Tuple(_, targets) => { - for target in targets { - match self.is_shadowing_variable(target) { - Ok(false) => continue, - outcome => return outcome, - } - } - Ok(false) - } - Target::Struct(_, named_targets) => { - for (_, target) in named_targets { - match self.is_shadowing_variable(target) { - Ok(false) => continue, - outcome => return outcome, - } - } - Ok(false) - } - _ => Err("literals are not allowed on the left-hand side of an assignment".into()), - } - } - - fn write_let( - &mut self, - buf: &mut Buffer, - ws: Ws, - var: &'a Target<'_>, - val: &Expr<'_>, - ) -> Result<(), CompileError> { - self.handle_ws(ws); - let mut expr_buf = Buffer::new(0); - self.visit_expr(&mut expr_buf, val)?; - - let shadowed = self.is_shadowing_variable(var)?; - if shadowed { - // Need to flush the buffer if the variable is being shadowed, - // to ensure the old variable is used. - self.write_buf_writable(buf)?; - } - if shadowed - || !matches!(var, &Target::Name(_)) - || matches!(var, Target::Name(name) if self.locals.get(name).is_none()) - { - buf.write("let "); - } - - self.visit_target(buf, true, true, var); - buf.writeln(&format!(" = {};", &expr_buf.buf)) - } - - // If `name` is `Some`, this is a call to a block definition, and we have to find - // the first block for that name from the ancestry chain. If name is `None`, this - // is from a `super()` call, and we can get the name from `self.super_block`. 
- fn write_block( - &mut self, - buf: &mut Buffer, - name: Option<&'a str>, - outer: Ws, - ) -> Result { - // Flush preceding whitespace according to the outer WS spec - self.flush_ws(outer); - - let prev_block = self.super_block; - let cur = match (name, prev_block) { - // The top-level context contains a block definition - (Some(cur_name), None) => (cur_name, 0), - // A block definition contains a block definition of the same name - (Some(cur_name), Some((prev_name, _))) if cur_name == prev_name => { - return Err(format!("cannot define recursive blocks ({})", cur_name).into()); - } - // A block definition contains a definition of another block - (Some(cur_name), Some((_, _))) => (cur_name, 0), - // `super()` was called inside a block - (None, Some((prev_name, gen))) => (prev_name, gen + 1), - // `super()` is called from outside a block - (None, None) => return Err("cannot call 'super()' outside block".into()), - }; - self.super_block = Some(cur); - - // Get the block definition from the heritage chain - let heritage = self - .heritage - .as_ref() - .ok_or_else(|| CompileError::from("no block ancestors available"))?; - let (ctx, def) = heritage.blocks[cur.0].get(cur.1).ok_or_else(|| { - CompileError::from(match name { - None => format!("no super() block found for block '{}'", cur.0), - Some(name) => format!("no block found for name '{}'", name), - }) - })?; - - // Get the nodes and whitespace suppression data from the block definition - let (ws1, nodes, ws2) = if let Node::BlockDef(ws1, _, nodes, ws2) = def { - (ws1, nodes, ws2) - } else { - unreachable!() - }; - - // Handle inner whitespace suppression spec and process block nodes - self.prepare_ws(*ws1); - self.locals.push(); - let size_hint = self.handle(ctx, nodes, buf, AstLevel::Block)?; - - if !self.locals.is_current_empty() { - // Need to flush the buffer before popping the variable stack - self.write_buf_writable(buf)?; - } - - self.locals.pop(); - self.flush_ws(*ws2); - - // Restore original block context and set whitespace suppression for - // succeeding whitespace according to the outer WS spec - self.super_block = prev_block; - self.prepare_ws(outer); - Ok(size_hint) - } - - fn write_expr(&mut self, ws: Ws, s: &'a Expr<'a>) { - self.handle_ws(ws); - self.buf_writable.push(Writable::Expr(s)); - } - - // Write expression buffer and empty - fn write_buf_writable(&mut self, buf: &mut Buffer) -> Result { - if self.buf_writable.is_empty() { - return Ok(0); - } - - if self - .buf_writable - .iter() - .all(|w| matches!(w, Writable::Lit(_))) - { - let mut buf_lit = Buffer::new(0); - for s in mem::take(&mut self.buf_writable) { - if let Writable::Lit(s) = s { - buf_lit.write(s); - }; - } - buf.writeln(&format!("writer.write_str({:#?})?;", &buf_lit.buf))?; - return Ok(buf_lit.buf.len()); - } - - let mut size_hint = 0; - let mut buf_format = Buffer::new(0); - let mut buf_expr = Buffer::new(buf.indent + 1); - let mut expr_cache = HashMap::with_capacity(self.buf_writable.len()); - for s in mem::take(&mut self.buf_writable) { - match s { - Writable::Lit(s) => { - buf_format.write(&s.replace('{', "{{").replace('}', "}}")); - size_hint += s.len(); - } - Writable::Expr(s) => { - use self::DisplayWrap::*; - let mut expr_buf = Buffer::new(0); - let wrapped = self.visit_expr(&mut expr_buf, s)?; - let expression = match wrapped { - Wrapped => expr_buf.buf, - Unwrapped => format!( - "::askama::MarkupDisplay::new_unsafe(&({}), {})", - expr_buf.buf, self.input.escaper - ), - }; - - use std::collections::hash_map::Entry; - let id = match 
expr_cache.entry(expression.clone()) { - Entry::Occupied(e) => *e.get(), - Entry::Vacant(e) => { - let id = self.named; - self.named += 1; - - buf_expr.write(&format!("expr{} = ", id)); - buf_expr.write("&"); - buf_expr.write(&expression); - buf_expr.writeln(",")?; - - e.insert(id); - id - } - }; - - buf_format.write(&format!("{{expr{}}}", id)); - size_hint += 3; - } - } - } - - buf.writeln("::std::write!(")?; - buf.indent(); - buf.writeln("writer,")?; - buf.writeln(&format!("{:#?},", &buf_format.buf))?; - buf.writeln(buf_expr.buf.trim())?; - buf.dedent()?; - buf.writeln(")?;")?; - Ok(size_hint) - } - - fn visit_lit(&mut self, lws: &'a str, val: &'a str, rws: &'a str) { - assert!(self.next_ws.is_none()); - if !lws.is_empty() { - match self.skip_ws { - WhitespaceHandling::Suppress => { - self.skip_ws = WhitespaceHandling::Preserve; - } - _ if val.is_empty() => { - assert!(rws.is_empty()); - self.next_ws = Some(lws); - } - WhitespaceHandling::Preserve => self.buf_writable.push(Writable::Lit(lws)), - WhitespaceHandling::Minimize => { - self.buf_writable - .push(Writable::Lit(match lws.contains('\n') { - true => "\n", - false => " ", - })) - } - } - } - - if !val.is_empty() { - self.buf_writable.push(Writable::Lit(val)); - } - - if !rws.is_empty() { - self.next_ws = Some(rws); - } - } - - fn write_comment(&mut self, ws: Ws) { - self.handle_ws(ws); - } - - /* Visitor methods for expression types */ - - fn visit_expr_root(&mut self, expr: &Expr<'_>) -> Result { - let mut buf = Buffer::new(0); - self.visit_expr(&mut buf, expr)?; - Ok(buf.buf) - } - - fn visit_expr( - &mut self, - buf: &mut Buffer, - expr: &Expr<'_>, - ) -> Result { - Ok(match *expr { - Expr::BoolLit(s) => self.visit_bool_lit(buf, s), - Expr::NumLit(s) => self.visit_num_lit(buf, s), - Expr::StrLit(s) => self.visit_str_lit(buf, s), - Expr::CharLit(s) => self.visit_char_lit(buf, s), - Expr::Var(s) => self.visit_var(buf, s), - Expr::Path(ref path) => self.visit_path(buf, path), - Expr::Array(ref elements) => self.visit_array(buf, elements)?, - Expr::Attr(ref obj, name) => self.visit_attr(buf, obj, name)?, - Expr::Index(ref obj, ref key) => self.visit_index(buf, obj, key)?, - Expr::Filter(name, ref args) => self.visit_filter(buf, name, args)?, - Expr::Unary(op, ref inner) => self.visit_unary(buf, op, inner)?, - Expr::BinOp(op, ref left, ref right) => self.visit_binop(buf, op, left, right)?, - Expr::Range(op, ref left, ref right) => self.visit_range(buf, op, left, right)?, - Expr::Group(ref inner) => self.visit_group(buf, inner)?, - Expr::Call(ref obj, ref args) => self.visit_call(buf, obj, args)?, - Expr::RustMacro(name, args) => self.visit_rust_macro(buf, name, args), - Expr::Try(ref expr) => self.visit_try(buf, expr.as_ref())?, - Expr::Tuple(ref exprs) => self.visit_tuple(buf, exprs)?, - }) - } - - fn visit_try( - &mut self, - buf: &mut Buffer, - expr: &Expr<'_>, - ) -> Result { - buf.write("::core::result::Result::map_err("); - self.visit_expr(buf, expr)?; - buf.write(", |err| ::askama::shared::Error::Custom(::core::convert::Into::into(err)))?"); - Ok(DisplayWrap::Unwrapped) - } - - fn visit_rust_macro(&mut self, buf: &mut Buffer, name: &str, args: &str) -> DisplayWrap { - buf.write(name); - buf.write("!("); - buf.write(args); - buf.write(")"); - - DisplayWrap::Unwrapped - } - - #[cfg(not(feature = "markdown"))] - fn _visit_markdown_filter( - &mut self, - _buf: &mut Buffer, - _args: &[Expr<'_>], - ) -> Result { - Err("the `markdown` filter requires the `markdown` feature to be enabled".into()) - } - - #[cfg(feature = "markdown")] - 
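// Hand-written example of the `::std::write!` call shape that `write_buf_writable`
// above produces: literals are folded into a single format string, and each
// distinct expression becomes one named `exprN` argument, so an expression used
// twice in the template is still evaluated only once.
use std::fmt::Write;

fn main() -> std::fmt::Result {
    let name = "world";
    let mut writer = String::new();
    ::std::write!(
        writer,
        "Hello, {expr0}! Goodbye, {expr0}.",
        expr0 = &name,
    )?;
    println!("{}", writer);
    Ok(())
}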
fn _visit_markdown_filter( - &mut self, - buf: &mut Buffer, - args: &[Expr<'_>], - ) -> Result { - let (md, options) = match args { - [md] => (md, None), - [md, options] => (md, Some(options)), - _ => return Err("markdown filter expects no more than one option argument".into()), - }; - - buf.write(&format!( - "::askama::filters::markdown({}, ", - self.input.escaper - )); - self.visit_expr(buf, md)?; - match options { - Some(options) => { - buf.write(", ::core::option::Option::Some("); - self.visit_expr(buf, options)?; - buf.write(")"); - } - None => buf.write(", ::core::option::Option::None"), - } - buf.write(")?"); - - Ok(DisplayWrap::Wrapped) - } - - fn visit_filter( - &mut self, - buf: &mut Buffer, - mut name: &str, - args: &[Expr<'_>], - ) -> Result { - if matches!(name, "escape" | "e") { - self._visit_escape_filter(buf, args)?; - return Ok(DisplayWrap::Wrapped); - } else if name == "format" { - self._visit_format_filter(buf, args)?; - return Ok(DisplayWrap::Unwrapped); - } else if name == "fmt" { - self._visit_fmt_filter(buf, args)?; - return Ok(DisplayWrap::Unwrapped); - } else if name == "join" { - self._visit_join_filter(buf, args)?; - return Ok(DisplayWrap::Unwrapped); - } else if name == "markdown" { - return self._visit_markdown_filter(buf, args); - } - - if name == "tojson" { - name = "json"; - } - - #[cfg(not(feature = "json"))] - if name == "json" { - return Err("the `json` filter requires the `serde-json` feature to be enabled".into()); - } - #[cfg(not(feature = "yaml"))] - if name == "yaml" { - return Err("the `yaml` filter requires the `serde-yaml` feature to be enabled".into()); - } - - const FILTERS: [&str; 2] = ["safe", "yaml"]; - if FILTERS.contains(&name) { - buf.write(&format!( - "::askama::filters::{}({}, ", - name, self.input.escaper - )); - } else if filters::BUILT_IN_FILTERS.contains(&name) { - buf.write(&format!("::askama::filters::{}(", name)); - } else { - buf.write(&format!("filters::{}(", name)); - } - - self._visit_args(buf, args)?; - buf.write(")?"); - Ok(match FILTERS.contains(&name) { - true => DisplayWrap::Wrapped, - false => DisplayWrap::Unwrapped, - }) - } - - fn _visit_escape_filter( - &mut self, - buf: &mut Buffer, - args: &[Expr<'_>], - ) -> Result<(), CompileError> { - if args.len() > 2 { - return Err("only two arguments allowed to escape filter".into()); - } - let opt_escaper = match args.get(1) { - Some(Expr::StrLit(name)) => Some(*name), - Some(_) => return Err("invalid escaper type for escape filter".into()), - None => None, - }; - let escaper = match opt_escaper { - Some(name) => self - .input - .config - .escapers - .iter() - .find_map(|(escapers, escaper)| escapers.contains(name).then(|| escaper)) - .ok_or_else(|| CompileError::from("invalid escaper for escape filter"))?, - None => self.input.escaper, - }; - buf.write("::askama::filters::escape("); - buf.write(escaper); - buf.write(", "); - self._visit_args(buf, &args[..1])?; - buf.write(")?"); - Ok(()) - } - - fn _visit_format_filter( - &mut self, - buf: &mut Buffer, - args: &[Expr<'_>], - ) -> Result<(), CompileError> { - buf.write("format!("); - if let Some(Expr::StrLit(v)) = args.first() { - self.visit_str_lit(buf, v); - if args.len() > 1 { - buf.write(", "); - } - } else { - return Err("invalid expression type for format filter".into()); - } - self._visit_args(buf, &args[1..])?; - buf.write(")"); - Ok(()) - } - - fn _visit_fmt_filter( - &mut self, - buf: &mut Buffer, - args: &[Expr<'_>], - ) -> Result<(), CompileError> { - buf.write("format!("); - if let Some(Expr::StrLit(v)) = 
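// Sketch of the fallback branch in `visit_filter` above: a name that is not a
// built-in filter is emitted as `filters::<name>(...)`, so a `filters` module in
// scope at the call site supplies custom filters. `shout` is a made-up filter and
// the error type is a stand-in; real askama filters return `::askama::Result`.
mod filters {
    pub fn shout(s: &str) -> Result<String, std::fmt::Error> {
        Ok(s.to_uppercase())
    }
}

fn main() -> Result<(), std::fmt::Error> {
    // Mirrors the shape of a generated `filters::shout(...)?` call.
    let rendered = filters::shout("hello")?;
    println!("{}", rendered);
    Ok(())
}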
args.get(1) { - self.visit_str_lit(buf, v); - buf.write(", "); - } else { - return Err("invalid expression type for fmt filter".into()); - } - self._visit_args(buf, &args[0..1])?; - if args.len() > 2 { - return Err("only two arguments allowed to fmt filter".into()); - } - buf.write(")"); - Ok(()) - } - - // Force type coercion on first argument to `join` filter (see #39). - fn _visit_join_filter( - &mut self, - buf: &mut Buffer, - args: &[Expr<'_>], - ) -> Result<(), CompileError> { - buf.write("::askama::filters::join((&"); - for (i, arg) in args.iter().enumerate() { - if i > 0 { - buf.write(", &"); - } - self.visit_expr(buf, arg)?; - if i == 0 { - buf.write(").into_iter()"); - } - } - buf.write(")?"); - Ok(()) - } - - fn _visit_args(&mut self, buf: &mut Buffer, args: &[Expr<'_>]) -> Result<(), CompileError> { - if args.is_empty() { - return Ok(()); - } - - for (i, arg) in args.iter().enumerate() { - if i > 0 { - buf.write(", "); - } - - let borrow = !arg.is_copyable(); - if borrow { - buf.write("&("); - } - - match arg { - Expr::Call(left, _) if !matches!(left.as_ref(), Expr::Path(_)) => { - buf.writeln("{")?; - self.visit_expr(buf, arg)?; - buf.writeln("}")?; - } - _ => { - self.visit_expr(buf, arg)?; - } - } - - if borrow { - buf.write(")"); - } - } - Ok(()) - } - - fn visit_attr( - &mut self, - buf: &mut Buffer, - obj: &Expr<'_>, - attr: &str, - ) -> Result { - if let Expr::Var(name) = *obj { - if name == "loop" { - if attr == "index" { - buf.write("(_loop_item.index + 1)"); - return Ok(DisplayWrap::Unwrapped); - } else if attr == "index0" { - buf.write("_loop_item.index"); - return Ok(DisplayWrap::Unwrapped); - } else if attr == "first" { - buf.write("_loop_item.first"); - return Ok(DisplayWrap::Unwrapped); - } else if attr == "last" { - buf.write("_loop_item.last"); - return Ok(DisplayWrap::Unwrapped); - } else { - return Err("unknown loop variable".into()); - } - } - } - self.visit_expr(buf, obj)?; - buf.write(&format!(".{}", normalize_identifier(attr))); - Ok(DisplayWrap::Unwrapped) - } - - fn visit_index( - &mut self, - buf: &mut Buffer, - obj: &Expr<'_>, - key: &Expr<'_>, - ) -> Result { - buf.write("&"); - self.visit_expr(buf, obj)?; - buf.write("["); - self.visit_expr(buf, key)?; - buf.write("]"); - Ok(DisplayWrap::Unwrapped) - } - - fn visit_call( - &mut self, - buf: &mut Buffer, - left: &Expr<'_>, - args: &[Expr<'_>], - ) -> Result { - match left { - Expr::Attr(left, method) if **left == Expr::Var("loop") => match *method { - "cycle" => match args { - [arg] => { - if matches!(arg, Expr::Array(arr) if arr.is_empty()) { - return Err("loop.cycle(…) cannot use an empty array".into()); - } - buf.write("({"); - buf.write("let _cycle = &("); - self.visit_expr(buf, arg)?; - buf.writeln(");")?; - buf.writeln("let _len = _cycle.len();")?; - buf.writeln("if _len == 0 {")?; - buf.writeln("return ::core::result::Result::Err(::askama::Error::Fmt(::core::fmt::Error));")?; - buf.writeln("}")?; - buf.writeln("_cycle[_loop_item.index % _len]")?; - buf.writeln("})")?; - } - _ => return Err("loop.cycle(…) expects exactly one argument".into()), - }, - s => return Err(format!("unknown loop method: {:?}", s).into()), - }, - left => { - match left { - Expr::Var(name) => match self.locals.resolve(name) { - Some(resolved) => buf.write(&resolved), - None => buf.write(&format!("(&self.{})", normalize_identifier(name))), - }, - left => { - self.visit_expr(buf, left)?; - } - } - - buf.write("("); - self._visit_args(buf, args)?; - buf.write(")"); - } - } - Ok(DisplayWrap::Unwrapped) - } - - fn 
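// Standalone sketch of the code generated for `loop.cycle([...])` in `visit_call`
// above: the cycle values are evaluated once, and every iteration indexes them by
// `index % len`. The generated code returns an error for an empty array; this
// sketch just asserts. The arrays here are illustrative only.
fn main() {
    let colors = ["red", "green", "blue"];
    for (index, item) in ["a", "b", "c", "d", "e"].iter().enumerate() {
        let color = {
            let _cycle = &colors;
            let _len = _cycle.len();
            assert!(_len != 0, "loop.cycle(…) cannot use an empty array");
            _cycle[index % _len]
        };
        println!("{} -> {}", item, color);
    }
}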
visit_unary( - &mut self, - buf: &mut Buffer, - op: &str, - inner: &Expr<'_>, - ) -> Result { - buf.write(op); - self.visit_expr(buf, inner)?; - Ok(DisplayWrap::Unwrapped) - } - - fn visit_range( - &mut self, - buf: &mut Buffer, - op: &str, - left: &Option>>, - right: &Option>>, - ) -> Result { - if let Some(left) = left { - self.visit_expr(buf, left)?; - } - buf.write(op); - if let Some(right) = right { - self.visit_expr(buf, right)?; - } - Ok(DisplayWrap::Unwrapped) - } - - fn visit_binop( - &mut self, - buf: &mut Buffer, - op: &str, - left: &Expr<'_>, - right: &Expr<'_>, - ) -> Result { - self.visit_expr(buf, left)?; - buf.write(&format!(" {} ", op)); - self.visit_expr(buf, right)?; - Ok(DisplayWrap::Unwrapped) - } - - fn visit_group( - &mut self, - buf: &mut Buffer, - inner: &Expr<'_>, - ) -> Result { - buf.write("("); - self.visit_expr(buf, inner)?; - buf.write(")"); - Ok(DisplayWrap::Unwrapped) - } - - fn visit_tuple( - &mut self, - buf: &mut Buffer, - exprs: &[Expr<'_>], - ) -> Result { - buf.write("("); - for (index, expr) in exprs.iter().enumerate() { - if index > 0 { - buf.write(" "); - } - self.visit_expr(buf, expr)?; - buf.write(","); - } - buf.write(")"); - Ok(DisplayWrap::Unwrapped) - } - - fn visit_array( - &mut self, - buf: &mut Buffer, - elements: &[Expr<'_>], - ) -> Result { - buf.write("["); - for (i, el) in elements.iter().enumerate() { - if i > 0 { - buf.write(", "); - } - self.visit_expr(buf, el)?; - } - buf.write("]"); - Ok(DisplayWrap::Unwrapped) - } - - fn visit_path(&mut self, buf: &mut Buffer, path: &[&str]) -> DisplayWrap { - for (i, part) in path.iter().enumerate() { - if i > 0 { - buf.write("::"); - } - buf.write(part); - } - DisplayWrap::Unwrapped - } - - fn visit_var(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { - if s == "self" { - buf.write(s); - return DisplayWrap::Unwrapped; - } - - buf.write(normalize_identifier(&self.locals.resolve_or_self(s))); - DisplayWrap::Unwrapped - } - - fn visit_bool_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { - buf.write(s); - DisplayWrap::Unwrapped - } - - fn visit_str_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { - buf.write(&format!("\"{}\"", s)); - DisplayWrap::Unwrapped - } - - fn visit_char_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { - buf.write(&format!("'{}'", s)); - DisplayWrap::Unwrapped - } - - fn visit_num_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap { - buf.write(s); - DisplayWrap::Unwrapped - } - - fn visit_target( - &mut self, - buf: &mut Buffer, - initialized: bool, - first_level: bool, - target: &Target<'a>, - ) { - match target { - Target::Name("_") => { - buf.write("_"); - } - Target::Name(name) => { - let name = normalize_identifier(name); - match initialized { - true => self.locals.insert(name, LocalMeta::initialized()), - false => self.locals.insert_with_default(name), - } - buf.write(name); - } - Target::Tuple(path, targets) => { - buf.write(&path.join("::")); - buf.write("("); - for target in targets { - self.visit_target(buf, initialized, false, target); - buf.write(","); - } - buf.write(")"); - } - Target::Struct(path, targets) => { - buf.write(&path.join("::")); - buf.write(" { "); - for (name, target) in targets { - buf.write(normalize_identifier(name)); - buf.write(": "); - self.visit_target(buf, initialized, false, target); - buf.write(","); - } - buf.write(" }"); - } - Target::Path(path) => { - self.visit_path(buf, path); - } - Target::StrLit(s) => { - if first_level { - buf.write("&"); - } - self.visit_str_lit(buf, s); - } - 
Target::NumLit(s) => { - if first_level { - buf.write("&"); - } - self.visit_num_lit(buf, s); - } - Target::CharLit(s) => { - if first_level { - buf.write("&"); - } - self.visit_char_lit(buf, s); - } - Target::BoolLit(s) => { - if first_level { - buf.write("&"); - } - buf.write(s); - } - } - } - - /* Helper methods for dealing with whitespace nodes */ - - // Combines `flush_ws()` and `prepare_ws()` to handle both trailing whitespace from the - // preceding literal and leading whitespace from the succeeding literal. - fn handle_ws(&mut self, ws: Ws) { - self.flush_ws(ws); - self.prepare_ws(ws); - } - - fn should_trim_ws(&self, ws: Option) -> WhitespaceHandling { - match ws { - Some(Whitespace::Suppress) => WhitespaceHandling::Suppress, - Some(Whitespace::Preserve) => WhitespaceHandling::Preserve, - Some(Whitespace::Minimize) => WhitespaceHandling::Minimize, - None => self.whitespace, - } - } - - // If the previous literal left some trailing whitespace in `next_ws` and the - // prefix whitespace suppressor from the given argument, flush that whitespace. - // In either case, `next_ws` is reset to `None` (no trailing whitespace). - fn flush_ws(&mut self, ws: Ws) { - if self.next_ws.is_none() { - return; - } - - // If `whitespace` is set to `suppress`, we keep the whitespace characters only if there is - // a `+` character. - match self.should_trim_ws(ws.0) { - WhitespaceHandling::Preserve => { - let val = self.next_ws.unwrap(); - if !val.is_empty() { - self.buf_writable.push(Writable::Lit(val)); - } - } - WhitespaceHandling::Minimize => { - let val = self.next_ws.unwrap(); - if !val.is_empty() { - self.buf_writable - .push(Writable::Lit(match val.contains('\n') { - true => "\n", - false => " ", - })); - } - } - WhitespaceHandling::Suppress => {} - } - self.next_ws = None; - } - - // Sets `skip_ws` to match the suffix whitespace suppressor from the given - // argument, to determine whether to suppress leading whitespace from the - // next literal. 
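// Minimal sketch of the `Minimize` rule implemented by `visit_lit`/`flush_ws`
// above: whitespace between nodes collapses to a single newline when it contained
// one, and to a single space otherwise; `Suppress` drops it and `Preserve` keeps
// it verbatim.
fn minimize(ws: &str) -> &'static str {
    if ws.contains('\n') { "\n" } else { " " }
}

fn main() {
    assert_eq!(minimize("  \t "), " ");
    assert_eq!(minimize(" \n  "), "\n");
    println!("ok");
}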
- fn prepare_ws(&mut self, ws: Ws) { - self.skip_ws = self.should_trim_ws(ws.1); - } -} - -struct Buffer { - // The buffer to generate the code into - buf: String, - // The current level of indentation (in spaces) - indent: u8, - // Whether the output buffer is currently at the start of a line - start: bool, -} - -impl Buffer { - fn new(indent: u8) -> Self { - Self { - buf: String::new(), - indent, - start: true, - } - } - - fn writeln(&mut self, s: &str) -> Result<(), CompileError> { - if s == "}" { - self.dedent()?; - } - if !s.is_empty() { - self.write(s); - } - self.buf.push('\n'); - if s.ends_with('{') { - self.indent(); - } - self.start = true; - Ok(()) - } - - fn write(&mut self, s: &str) { - if self.start { - for _ in 0..(self.indent * 4) { - self.buf.push(' '); - } - self.start = false; - } - self.buf.push_str(s); - } - - fn indent(&mut self) { - self.indent += 1; - } - - fn dedent(&mut self) -> Result<(), CompileError> { - if self.indent == 0 { - return Err("dedent() called while indentation == 0".into()); - } - self.indent -= 1; - Ok(()) - } -} - -#[derive(Clone, Default)] -struct LocalMeta { - refs: Option, - initialized: bool, -} - -impl LocalMeta { - fn initialized() -> Self { - Self { - refs: None, - initialized: true, - } - } - - fn with_ref(refs: String) -> Self { - Self { - refs: Some(refs), - initialized: true, - } - } -} - -// type SetChain<'a, T> = MapChain<'a, T, ()>; - -#[derive(Debug)] -struct MapChain<'a, K, V> -where - K: cmp::Eq + hash::Hash, -{ - parent: Option<&'a MapChain<'a, K, V>>, - scopes: Vec>, -} - -impl<'a, K: 'a, V: 'a> MapChain<'a, K, V> -where - K: cmp::Eq + hash::Hash, -{ - fn new() -> MapChain<'a, K, V> { - MapChain { - parent: None, - scopes: vec![HashMap::new()], - } - } - - fn with_parent<'p>(parent: &'p MapChain<'_, K, V>) -> MapChain<'p, K, V> { - MapChain { - parent: Some(parent), - scopes: vec![HashMap::new()], - } - } - - /// Iterates the scopes in reverse and returns `Some(LocalMeta)` - /// from the first scope where `key` exists. - fn get(&self, key: &K) -> Option<&V> { - let scopes = self.scopes.iter().rev(); - scopes - .filter_map(|set| set.get(key)) - .next() - .or_else(|| self.parent.and_then(|set| set.get(key))) - } - - fn is_current_empty(&self) -> bool { - self.scopes.last().unwrap().is_empty() - } - - fn insert(&mut self, key: K, val: V) { - self.scopes.last_mut().unwrap().insert(key, val); - - // Note that if `insert` returns `Some` then it implies - // an identifier is reused. For e.g. `{% macro f(a, a) %}` - // and `{% let (a, a) = ... %}` then this results in a - // generated template, which when compiled fails with the - // compile error "identifier `a` used more than once". 
- } - - fn insert_with_default(&mut self, key: K) - where - V: Default, - { - self.insert(key, V::default()); - } - - fn push(&mut self) { - self.scopes.push(HashMap::new()); - } - - fn pop(&mut self) { - self.scopes.pop().unwrap(); - assert!(!self.scopes.is_empty()); - } -} - -impl MapChain<'_, &str, LocalMeta> { - fn resolve(&self, name: &str) -> Option { - let name = normalize_identifier(name); - self.get(&name).map(|meta| match &meta.refs { - Some(expr) => expr.clone(), - None => name.to_string(), - }) - } - - fn resolve_or_self(&self, name: &str) -> String { - let name = normalize_identifier(name); - self.resolve(name) - .unwrap_or_else(|| format!("self.{}", name)) - } -} - -fn median(sizes: &mut [usize]) -> usize { - sizes.sort_unstable(); - if sizes.len() % 2 == 1 { - sizes[sizes.len() / 2] - } else { - (sizes[sizes.len() / 2 - 1] + sizes[sizes.len() / 2]) / 2 - } -} - -#[derive(Clone, PartialEq)] -enum AstLevel { - Top, - Block, - Nested, -} - -impl Copy for AstLevel {} - -#[derive(Clone)] -enum DisplayWrap { - Wrapped, - Unwrapped, -} - -impl Copy for DisplayWrap {} - -#[derive(Debug)] -enum Writable<'a> { - Lit(&'a str), - Expr(&'a Expr<'a>), -} - -// Identifiers to be replaced with raw identifiers, so as to avoid -// collisions between template syntax and Rust's syntax. In particular -// [Rust keywords](https://doc.rust-lang.org/reference/keywords.html) -// should be replaced, since they're not reserved words in Askama -// syntax but have a high probability of causing problems in the -// generated code. -// -// This list excludes the Rust keywords *self*, *Self*, and *super* -// because they are not allowed to be raw identifiers, and *loop* -// because it's used something like a keyword in the template -// language. -static USE_RAW: [(&str, &str); 47] = [ - ("as", "r#as"), - ("break", "r#break"), - ("const", "r#const"), - ("continue", "r#continue"), - ("crate", "r#crate"), - ("else", "r#else"), - ("enum", "r#enum"), - ("extern", "r#extern"), - ("false", "r#false"), - ("fn", "r#fn"), - ("for", "r#for"), - ("if", "r#if"), - ("impl", "r#impl"), - ("in", "r#in"), - ("let", "r#let"), - ("match", "r#match"), - ("mod", "r#mod"), - ("move", "r#move"), - ("mut", "r#mut"), - ("pub", "r#pub"), - ("ref", "r#ref"), - ("return", "r#return"), - ("static", "r#static"), - ("struct", "r#struct"), - ("trait", "r#trait"), - ("true", "r#true"), - ("type", "r#type"), - ("unsafe", "r#unsafe"), - ("use", "r#use"), - ("where", "r#where"), - ("while", "r#while"), - ("async", "r#async"), - ("await", "r#await"), - ("dyn", "r#dyn"), - ("abstract", "r#abstract"), - ("become", "r#become"), - ("box", "r#box"), - ("do", "r#do"), - ("final", "r#final"), - ("macro", "r#macro"), - ("override", "r#override"), - ("priv", "r#priv"), - ("typeof", "r#typeof"), - ("unsized", "r#unsized"), - ("virtual", "r#virtual"), - ("yield", "r#yield"), - ("try", "r#try"), -]; - -fn normalize_identifier(ident: &str) -> &str { - if let Some(word) = USE_RAW.iter().find(|x| x.0 == ident) { - word.1 - } else { - ident - } -} diff --git a/askama_shared/src/heritage.rs b/askama_shared/src/heritage.rs deleted file mode 100644 index 52c14a2..0000000 --- a/askama_shared/src/heritage.rs +++ /dev/null @@ -1,126 +0,0 @@ -use std::collections::HashMap; -use std::path::{Path, PathBuf}; - -use crate::config::Config; -use crate::parser::{Expr, Loop, Macro, Node}; -use crate::CompileError; - -pub(crate) struct Heritage<'a> { - pub(crate) root: &'a Context<'a>, - pub(crate) blocks: BlockAncestry<'a>, -} - -impl Heritage<'_> { - pub(crate) fn new<'n, 
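// Quick illustration of why `normalize_identifier` above rewrites Rust keywords as
// raw identifiers: a template context with a field named `type` can only be
// referenced in the generated code as `r#type`.
struct Page {
    r#type: &'static str,
}

fn main() {
    let page = Page { r#type: "article" };
    println!("{}", page.r#type);
}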
S: std::hash::BuildHasher>( - mut ctx: &'n Context<'n>, - contexts: &'n HashMap<&'n Path, Context<'n>, S>, - ) -> Heritage<'n> { - let mut blocks: BlockAncestry<'n> = ctx - .blocks - .iter() - .map(|(name, def)| (*name, vec![(ctx, *def)])) - .collect(); - - while let Some(ref path) = ctx.extends { - ctx = &contexts[path.as_path()]; - for (name, def) in &ctx.blocks { - blocks.entry(name).or_insert_with(Vec::new).push((ctx, def)); - } - } - - Heritage { root: ctx, blocks } - } -} - -type BlockAncestry<'a> = HashMap<&'a str, Vec<(&'a Context<'a>, &'a Node<'a>)>>; - -pub(crate) struct Context<'a> { - pub(crate) nodes: &'a [Node<'a>], - pub(crate) extends: Option, - pub(crate) blocks: HashMap<&'a str, &'a Node<'a>>, - pub(crate) macros: HashMap<&'a str, &'a Macro<'a>>, - pub(crate) imports: HashMap<&'a str, PathBuf>, -} - -impl Context<'_> { - pub(crate) fn new<'n>( - config: &Config<'_>, - path: &Path, - nodes: &'n [Node<'n>], - ) -> Result, CompileError> { - let mut extends = None; - let mut blocks = Vec::new(); - let mut macros = HashMap::new(); - let mut imports = HashMap::new(); - let mut nested = vec![nodes]; - let mut top = true; - - while let Some(nodes) = nested.pop() { - for n in nodes { - match n { - Node::Extends(Expr::StrLit(extends_path)) if top => match extends { - Some(_) => return Err("multiple extend blocks found".into()), - None => { - extends = Some(config.find_template(extends_path, Some(path))?); - } - }, - Node::Macro(name, m) if top => { - macros.insert(*name, m); - } - Node::Import(_, import_path, scope) if top => { - let path = config.find_template(import_path, Some(path))?; - imports.insert(*scope, path); - } - Node::Extends(_) | Node::Macro(_, _) | Node::Import(_, _, _) if !top => { - return Err( - "extends, macro or import blocks not allowed below top level".into(), - ); - } - def @ Node::BlockDef(_, _, _, _) => { - blocks.push(def); - if let Node::BlockDef(_, _, nodes, _) = def { - nested.push(nodes); - } - } - Node::Cond(branches, _) => { - for (_, _, nodes) in branches { - nested.push(nodes); - } - } - Node::Loop(Loop { - body, else_block, .. - }) => { - nested.push(body); - nested.push(else_block); - } - Node::Match(_, _, arms, _) => { - for (_, _, arm) in arms { - nested.push(arm); - } - } - _ => {} - } - } - top = false; - } - - let blocks: HashMap<_, _> = blocks - .iter() - .map(|def| { - if let Node::BlockDef(_, name, _, _) = def { - (*name, *def) - } else { - unreachable!() - } - }) - .collect(); - - Ok(Context { - nodes, - extends, - blocks, - macros, - imports, - }) - } -} diff --git a/askama_shared/src/input.rs b/askama_shared/src/input.rs deleted file mode 100644 index 1f367fd..0000000 --- a/askama_shared/src/input.rs +++ /dev/null @@ -1,256 +0,0 @@ -use crate::config::{Config, Syntax}; -use crate::generator::TemplateArgs; -use crate::CompileError; - -use std::path::{Path, PathBuf}; -use std::str::FromStr; - -use mime::Mime; - -pub(crate) struct TemplateInput<'a> { - pub(crate) ast: &'a syn::DeriveInput, - pub(crate) config: &'a Config<'a>, - pub(crate) syntax: &'a Syntax<'a>, - pub(crate) source: Source, - pub(crate) print: Print, - pub(crate) escaper: &'a str, - pub(crate) ext: Option, - pub(crate) mime_type: String, - pub(crate) parent: Option<&'a syn::Type>, - pub(crate) path: PathBuf, -} - -impl TemplateInput<'_> { - /// Extract the template metadata from the `DeriveInput` structure. 
This - /// mostly recovers the data for the `TemplateInput` fields from the - /// `template()` attribute list fields; it also finds the of the `_parent` - /// field, if any. - pub(crate) fn new<'n>( - ast: &'n syn::DeriveInput, - config: &'n Config<'_>, - args: TemplateArgs, - ) -> Result, CompileError> { - let TemplateArgs { - source, - print, - escaping, - ext, - syntax, - .. - } = args; - - // Validate the `source` and `ext` value together, since they are - // related. In case `source` was used instead of `path`, the value - // of `ext` is merged into a synthetic `path` value here. - let source = source.expect("template path or source not found in attributes"); - let path = match (&source, &ext) { - (&Source::Path(ref path), _) => config.find_template(path, None)?, - (&Source::Source(_), Some(ext)) => PathBuf::from(format!("{}.{}", ast.ident, ext)), - (&Source::Source(_), None) => { - return Err("must include 'ext' attribute when using 'source' attribute".into()) - } - }; - - // Check to see if a `_parent` field was defined on the context - // struct, and store the type for it for use in the code generator. - let parent = match ast.data { - syn::Data::Struct(syn::DataStruct { - fields: syn::Fields::Named(ref fields), - .. - }) => fields - .named - .iter() - .find(|f| f.ident.as_ref().filter(|name| *name == "_parent").is_some()) - .map(|f| &f.ty), - _ => None, - }; - - if parent.is_some() { - eprint!( - " --> in struct {}\n = use of deprecated field '_parent'\n", - ast.ident - ); - } - - // Validate syntax - let syntax = syntax.map_or_else( - || Ok(config.syntaxes.get(config.default_syntax).unwrap()), - |s| { - config - .syntaxes - .get(&s) - .ok_or_else(|| CompileError::from(format!("attribute syntax {} not exist", s))) - }, - )?; - - // Match extension against defined output formats - - let escaping = escaping.unwrap_or_else(|| { - path.extension() - .map(|s| s.to_str().unwrap()) - .unwrap_or("") - .to_string() - }); - - let mut escaper = None; - for (extensions, path) in &config.escapers { - if extensions.contains(&escaping) { - escaper = Some(path); - break; - } - } - - let escaper = escaper.ok_or_else(|| { - CompileError::from(format!("no escaper defined for extension '{}'", escaping)) - })?; - - let mime_type = - extension_to_mime_type(ext_default_to_path(ext.as_deref(), &path).unwrap_or("txt")) - .to_string(); - - Ok(TemplateInput { - ast, - config, - syntax, - source, - print, - escaper, - ext, - mime_type, - parent, - path, - }) - } - - #[inline] - pub(crate) fn extension(&self) -> Option<&str> { - ext_default_to_path(self.ext.as_deref(), &self.path) - } -} - -#[inline] -fn ext_default_to_path<'a>(ext: Option<&'a str>, path: &'a Path) -> Option<&'a str> { - ext.or_else(|| extension(path)) -} - -fn extension(path: &Path) -> Option<&str> { - let ext = path.extension().map(|s| s.to_str().unwrap())?; - - const JINJA_EXTENSIONS: [&str; 3] = ["j2", "jinja", "jinja2"]; - if JINJA_EXTENSIONS.contains(&ext) { - Path::new(path.file_stem().unwrap()) - .extension() - .map(|s| s.to_str().unwrap()) - .or(Some(ext)) - } else { - Some(ext) - } -} - -pub(crate) enum Source { - Path(String), - Source(String), -} - -#[derive(PartialEq)] -pub(crate) enum Print { - All, - Ast, - Code, - None, -} - -impl FromStr for Print { - type Err = CompileError; - - fn from_str(s: &str) -> Result { - use self::Print::*; - Ok(match s { - "all" => All, - "ast" => Ast, - "code" => Code, - "none" => None, - v => return Err(format!("invalid value for print option: {}", v,).into()), - }) - } -} - -impl Default for 
Print { - fn default() -> Self { - Self::None - } -} - -#[doc(hidden)] -pub fn extension_to_mime_type(ext: &str) -> Mime { - let basic_type = mime_guess::from_ext(ext).first_or_octet_stream(); - for (simple, utf_8) in &TEXT_TYPES { - if &basic_type == simple { - return utf_8.clone(); - } - } - basic_type -} - -const TEXT_TYPES: [(Mime, Mime); 6] = [ - (mime::TEXT_PLAIN, mime::TEXT_PLAIN_UTF_8), - (mime::TEXT_HTML, mime::TEXT_HTML_UTF_8), - (mime::TEXT_CSS, mime::TEXT_CSS_UTF_8), - (mime::TEXT_CSV, mime::TEXT_CSV_UTF_8), - ( - mime::TEXT_TAB_SEPARATED_VALUES, - mime::TEXT_TAB_SEPARATED_VALUES_UTF_8, - ), - ( - mime::APPLICATION_JAVASCRIPT, - mime::APPLICATION_JAVASCRIPT_UTF_8, - ), -]; - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_ext() { - assert_eq!(extension(Path::new("foo-bar.txt")), Some("txt")); - assert_eq!(extension(Path::new("foo-bar.html")), Some("html")); - assert_eq!(extension(Path::new("foo-bar.unknown")), Some("unknown")); - - assert_eq!(extension(Path::new("foo/bar/baz.txt")), Some("txt")); - assert_eq!(extension(Path::new("foo/bar/baz.html")), Some("html")); - assert_eq!(extension(Path::new("foo/bar/baz.unknown")), Some("unknown")); - } - - #[test] - fn test_double_ext() { - assert_eq!(extension(Path::new("foo-bar.html.txt")), Some("txt")); - assert_eq!(extension(Path::new("foo-bar.txt.html")), Some("html")); - assert_eq!(extension(Path::new("foo-bar.txt.unknown")), Some("unknown")); - - assert_eq!(extension(Path::new("foo/bar/baz.html.txt")), Some("txt")); - assert_eq!(extension(Path::new("foo/bar/baz.txt.html")), Some("html")); - assert_eq!( - extension(Path::new("foo/bar/baz.txt.unknown")), - Some("unknown") - ); - } - - #[test] - fn test_skip_jinja_ext() { - assert_eq!(extension(Path::new("foo-bar.html.j2")), Some("html")); - assert_eq!(extension(Path::new("foo-bar.html.jinja")), Some("html")); - assert_eq!(extension(Path::new("foo-bar.html.jinja2")), Some("html")); - - assert_eq!(extension(Path::new("foo/bar/baz.txt.j2")), Some("txt")); - assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja")), Some("txt")); - assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja2")), Some("txt")); - } - - #[test] - fn test_only_jinja_ext() { - assert_eq!(extension(Path::new("foo-bar.j2")), Some("j2")); - assert_eq!(extension(Path::new("foo-bar.jinja")), Some("jinja")); - assert_eq!(extension(Path::new("foo-bar.jinja2")), Some("jinja2")); - } -} diff --git a/askama_shared/src/lib.rs b/askama_shared/src/lib.rs index a3ee4c1..cb26406 100644 --- a/askama_shared/src/lib.rs +++ b/askama_shared/src/lib.rs @@ -3,23 +3,14 @@ #![deny(elided_lifetimes_in_paths)] #![deny(unreachable_pub)] -use std::borrow::Cow; use std::fmt; -pub use crate::generator::derive_template; -pub use crate::input::extension_to_mime_type; pub use askama_escape::MarkupDisplay; -use proc_macro2::{Span, TokenStream}; -mod config; mod error; pub use crate::error::{Error, Result}; pub mod filters; -mod generator; pub mod helpers; -mod heritage; -mod input; -mod parser; /// Main `Template` trait; implementations are generally derived /// @@ -107,48 +98,6 @@ impl fmt::Display for dyn DynTemplate { } } -#[derive(Debug, Clone)] -struct CompileError { - msg: Cow<'static, str>, - span: Span, -} - -impl CompileError { - fn new>>(s: S, span: Span) -> Self { - Self { - msg: s.into(), - span, - } - } - - fn into_compile_error(self) -> TokenStream { - syn::Error::new(self.span, self.msg).to_compile_error() - } -} - -impl std::error::Error for CompileError {} - -impl fmt::Display for CompileError { - #[inline] - fn 
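// Hedged usage sketch of the MIME lookup in `extension_to_mime_type` above,
// assuming the `mime` and `mime_guess` crates; only the HTML pair from TEXT_TYPES
// is handled here, where the real function checks all six text types.
use mime::Mime;

fn to_utf8_mime(ext: &str) -> Mime {
    let guessed = mime_guess::from_ext(ext).first_or_octet_stream();
    if guessed == mime::TEXT_HTML {
        mime::TEXT_HTML_UTF_8
    } else {
        guessed
    }
}

fn main() {
    println!("{}", to_utf8_mime("html")); // text/html; charset=utf-8
    println!("{}", to_utf8_mime("bin")); // application/octet-stream
}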
fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt.write_str(&self.msg) - } -} - -impl From<&'static str> for CompileError { - #[inline] - fn from(s: &'static str) -> Self { - Self::new(s, Span::call_site()) - } -} - -impl From for CompileError { - #[inline] - fn from(s: String) -> Self { - Self::new(s, Span::call_site()) - } -} - #[cfg(test)] #[allow(clippy::blacklisted_name)] mod tests { diff --git a/askama_shared/src/parser.rs b/askama_shared/src/parser.rs deleted file mode 100644 index efcad73..0000000 --- a/askama_shared/src/parser.rs +++ /dev/null @@ -1,1885 +0,0 @@ -use std::cell::Cell; -use std::str; - -use nom::branch::alt; -use nom::bytes::complete::{escaped, is_not, tag, take_till, take_until}; -use nom::character::complete::{anychar, char, digit1}; -use nom::combinator::{complete, consumed, cut, eof, map, not, opt, peek, recognize, value}; -use nom::error::{Error, ErrorKind}; -use nom::multi::{fold_many0, many0, many1, separated_list0, separated_list1}; -use nom::sequence::{delimited, pair, preceded, terminated, tuple}; -use nom::{self, error_position, AsChar, IResult, InputTakeAtPosition}; - -use crate::config::Syntax; -use crate::CompileError; - -#[derive(Debug, PartialEq)] -pub(crate) enum Node<'a> { - Lit(&'a str, &'a str, &'a str), - Comment(Ws), - Expr(Ws, Expr<'a>), - Call(Ws, Option<&'a str>, &'a str, Vec>), - LetDecl(Ws, Target<'a>), - Let(Ws, Target<'a>, Expr<'a>), - Cond(Vec>, Ws), - Match(Ws, Expr<'a>, Vec>, Ws), - Loop(Loop<'a>), - Extends(Expr<'a>), - BlockDef(Ws, &'a str, Vec>, Ws), - Include(Ws, &'a str), - Import(Ws, &'a str, &'a str), - Macro(&'a str, Macro<'a>), - Raw(Ws, &'a str, &'a str, &'a str, Ws), - Break(Ws), - Continue(Ws), -} - -#[derive(Debug, PartialEq)] -pub(crate) struct Loop<'a> { - pub(crate) ws1: Ws, - pub(crate) var: Target<'a>, - pub(crate) iter: Expr<'a>, - pub(crate) cond: Option>, - pub(crate) body: Vec>, - pub(crate) ws2: Ws, - pub(crate) else_block: Vec>, - pub(crate) ws3: Ws, -} - -#[derive(Debug, PartialEq)] -pub(crate) enum Expr<'a> { - BoolLit(&'a str), - NumLit(&'a str), - StrLit(&'a str), - CharLit(&'a str), - Var(&'a str), - Path(Vec<&'a str>), - Array(Vec>), - Attr(Box>, &'a str), - Index(Box>, Box>), - Filter(&'a str, Vec>), - Unary(&'a str, Box>), - BinOp(&'a str, Box>, Box>), - Range(&'a str, Option>>, Option>>), - Group(Box>), - Tuple(Vec>), - Call(Box>, Vec>), - RustMacro(&'a str, &'a str), - Try(Box>), -} - -impl Expr<'_> { - /// Returns `true` if enough assumptions can be made, - /// to determine that `self` is copyable. - pub(crate) fn is_copyable(&self) -> bool { - self.is_copyable_within_op(false) - } - - fn is_copyable_within_op(&self, within_op: bool) -> bool { - use Expr::*; - match self { - BoolLit(_) | NumLit(_) | StrLit(_) | CharLit(_) => true, - Unary(.., expr) => expr.is_copyable_within_op(true), - BinOp(_, lhs, rhs) => { - lhs.is_copyable_within_op(true) && rhs.is_copyable_within_op(true) - } - Range(..) => true, - // The result of a call likely doesn't need to be borrowed, - // as in that case the call is more likely to return a - // reference in the first place then. - Call(..) | Path(..) => true, - // If the `expr` is within a `Unary` or `BinOp` then - // an assumption can be made that the operand is copy. - // If not, then the value is moved and adding `.clone()` - // will solve that issue. However, if the operand is - // implicitly borrowed, then it's likely not even possible - // to get the template to compile. 
- _ => within_op && self.is_attr_self(), - } - } - - /// Returns `true` if this is an `Attr` where the `obj` is `"self"`. - pub(crate) fn is_attr_self(&self) -> bool { - match self { - Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Var("self")) => true, - Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Attr(..)) => obj.is_attr_self(), - _ => false, - } - } -} - -pub(crate) type When<'a> = (Ws, Target<'a>, Vec>); - -#[derive(Debug, PartialEq)] -pub(crate) struct Macro<'a> { - pub(crate) ws1: Ws, - pub(crate) args: Vec<&'a str>, - pub(crate) nodes: Vec>, - pub(crate) ws2: Ws, -} - -#[derive(Debug, PartialEq)] -pub(crate) enum Target<'a> { - Name(&'a str), - Tuple(Vec<&'a str>, Vec>), - Struct(Vec<&'a str>, Vec<(&'a str, Target<'a>)>), - NumLit(&'a str), - StrLit(&'a str), - CharLit(&'a str), - BoolLit(&'a str), - Path(Vec<&'a str>), -} - -#[derive(Clone, Copy, Debug, PartialEq)] -pub(crate) enum Whitespace { - Preserve, - Suppress, - Minimize, -} - -impl From for Whitespace { - fn from(c: char) -> Self { - match c { - '+' => Self::Preserve, - '-' => Self::Suppress, - '~' => Self::Minimize, - _ => panic!("unsupported `Whitespace` conversion"), - } - } -} - -/// First field is "minus/plus sign was used on the left part of the item". -/// -/// Second field is "minus/plus sign was used on the right part of the item". -#[derive(Clone, Copy, Debug, PartialEq)] -pub(crate) struct Ws(pub(crate) Option, pub(crate) Option); - -pub(crate) type Cond<'a> = (Ws, Option>, Vec>); - -#[derive(Debug, PartialEq)] -pub(crate) struct CondTest<'a> { - pub(crate) target: Option>, - pub(crate) expr: Expr<'a>, -} - -fn is_ws(c: char) -> bool { - matches!(c, ' ' | '\t' | '\r' | '\n') -} - -fn not_ws(c: char) -> bool { - !is_ws(c) -} - -fn ws<'a, O>( - inner: impl FnMut(&'a str) -> IResult<&'a str, O>, -) -> impl FnMut(&'a str) -> IResult<&'a str, O> { - delimited(take_till(not_ws), inner, take_till(not_ws)) -} - -fn split_ws_parts(s: &str) -> Node<'_> { - let trimmed_start = s.trim_start_matches(is_ws); - let len_start = s.len() - trimmed_start.len(); - let trimmed = trimmed_start.trim_end_matches(is_ws); - Node::Lit(&s[..len_start], trimmed, &trimmed_start[trimmed.len()..]) -} - -/// Skips input until `end` was found, but does not consume it. -/// Returns tuple that would be returned when parsing `end`. -fn skip_till<'a, O>( - end: impl FnMut(&'a str) -> IResult<&'a str, O>, -) -> impl FnMut(&'a str) -> IResult<&'a str, (&'a str, O)> { - enum Next { - IsEnd(O), - NotEnd(char), - } - let mut next = alt((map(end, Next::IsEnd), map(anychar, Next::NotEnd))); - move |start: &'a str| { - let mut i = start; - loop { - let (j, is_end) = next(i)?; - match is_end { - Next::IsEnd(lookahead) => return Ok((i, (j, lookahead))), - Next::NotEnd(_) => i = j, - } - } - } -} - -struct State<'a> { - syntax: &'a Syntax<'a>, - loop_depth: Cell, -} - -fn take_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let p_start = alt(( - tag(s.syntax.block_start), - tag(s.syntax.comment_start), - tag(s.syntax.expr_start), - )); - - let (i, _) = not(eof)(i)?; - let (i, content) = opt(recognize(skip_till(p_start)))(i)?; - let (i, content) = match content { - Some("") => { - // {block,comment,expr}_start follows immediately. 
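// Standalone usage demo of the `ws(...)` combinator defined above (re-typed here
// so the snippet compiles on its own), assuming nom 7: it strips surrounding
// whitespace before and after whatever the inner parser matches.
use nom::bytes::complete::{tag, take_till};
use nom::sequence::delimited;
use nom::IResult;

fn is_ws(c: char) -> bool {
    matches!(c, ' ' | '\t' | '\r' | '\n')
}

fn not_ws(c: char) -> bool {
    !is_ws(c)
}

fn ws<'a, O>(
    inner: impl FnMut(&'a str) -> IResult<&'a str, O>,
) -> impl FnMut(&'a str) -> IResult<&'a str, O> {
    delimited(take_till(not_ws), inner, take_till(not_ws))
}

fn main() {
    let mut parser = ws(tag("if"));
    assert_eq!(parser("  if  cond"), Ok(("cond", "if")));
    println!("ok");
}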
- return Err(nom::Err::Error(error_position!(i, ErrorKind::TakeUntil))); - } - Some(content) => (i, content), - None => ("", i), // there is no {block,comment,expr}_start: take everything - }; - Ok((i, split_ws_parts(content))) -} - -fn identifier(input: &str) -> IResult<&str, &str> { - recognize(pair(identifier_start, opt(identifier_tail)))(input) -} - -fn identifier_start(s: &str) -> IResult<&str, &str> { - s.split_at_position1_complete( - |c| !(c.is_alpha() || c == '_' || c >= '\u{0080}'), - nom::error::ErrorKind::Alpha, - ) -} - -fn identifier_tail(s: &str) -> IResult<&str, &str> { - s.split_at_position1_complete( - |c| !(c.is_alphanum() || c == '_' || c >= '\u{0080}'), - nom::error::ErrorKind::Alpha, - ) -} - -fn bool_lit(i: &str) -> IResult<&str, &str> { - alt((tag("false"), tag("true")))(i) -} - -fn expr_bool_lit(i: &str) -> IResult<&str, Expr<'_>> { - map(bool_lit, Expr::BoolLit)(i) -} - -fn variant_bool_lit(i: &str) -> IResult<&str, Target<'_>> { - map(bool_lit, Target::BoolLit)(i) -} - -fn num_lit(i: &str) -> IResult<&str, &str> { - recognize(pair(digit1, opt(pair(char('.'), digit1))))(i) -} - -fn expr_num_lit(i: &str) -> IResult<&str, Expr<'_>> { - map(num_lit, Expr::NumLit)(i) -} - -fn expr_array_lit(i: &str) -> IResult<&str, Expr<'_>> { - delimited( - ws(char('[')), - map(separated_list1(ws(char(',')), expr_any), Expr::Array), - ws(char(']')), - )(i) -} - -fn variant_num_lit(i: &str) -> IResult<&str, Target<'_>> { - map(num_lit, Target::NumLit)(i) -} - -fn str_lit(i: &str) -> IResult<&str, &str> { - let (i, s) = delimited( - char('"'), - opt(escaped(is_not("\\\""), '\\', anychar)), - char('"'), - )(i)?; - Ok((i, s.unwrap_or_default())) -} - -fn expr_str_lit(i: &str) -> IResult<&str, Expr<'_>> { - map(str_lit, Expr::StrLit)(i) -} - -fn variant_str_lit(i: &str) -> IResult<&str, Target<'_>> { - map(str_lit, Target::StrLit)(i) -} - -fn char_lit(i: &str) -> IResult<&str, &str> { - let (i, s) = delimited( - char('\''), - opt(escaped(is_not("\\\'"), '\\', anychar)), - char('\''), - )(i)?; - Ok((i, s.unwrap_or_default())) -} - -fn expr_char_lit(i: &str) -> IResult<&str, Expr<'_>> { - map(char_lit, Expr::CharLit)(i) -} - -fn variant_char_lit(i: &str) -> IResult<&str, Target<'_>> { - map(char_lit, Target::CharLit)(i) -} - -fn expr_var(i: &str) -> IResult<&str, Expr<'_>> { - map(identifier, Expr::Var)(i) -} - -fn path(i: &str) -> IResult<&str, Vec<&str>> { - let root = opt(value("", ws(tag("::")))); - let tail = separated_list1(ws(tag("::")), identifier); - - match tuple((root, identifier, ws(tag("::")), tail))(i) { - Ok((i, (root, start, _, rest))) => { - let mut path = Vec::new(); - path.extend(root); - path.push(start); - path.extend(rest); - Ok((i, path)) - } - Err(err) => { - if let Ok((i, name)) = identifier(i) { - // The returned identifier can be assumed to be path if: - // - Contains both a lowercase and uppercase character, i.e. a type name like `None` - // - Doesn't contain any lowercase characters, i.e. it's a constant - // In short, if it contains any uppercase characters it's a path. 
- if name.contains(char::is_uppercase) { - return Ok((i, vec![name])); - } - } - - // If `identifier()` fails then just return the original error - Err(err) - } - } -} - -fn expr_path(i: &str) -> IResult<&str, Expr<'_>> { - let (i, path) = path(i)?; - Ok((i, Expr::Path(path))) -} - -fn named_target(i: &str) -> IResult<&str, (&str, Target<'_>)> { - let (i, (src, target)) = pair(identifier, opt(preceded(ws(char(':')), target)))(i)?; - Ok((i, (src, target.unwrap_or(Target::Name(src))))) -} - -fn variant_lit(i: &str) -> IResult<&str, Target<'_>> { - alt(( - variant_str_lit, - variant_char_lit, - variant_num_lit, - variant_bool_lit, - ))(i) -} - -fn target(i: &str) -> IResult<&str, Target<'_>> { - let mut opt_opening_paren = map(opt(ws(char('('))), |o| o.is_some()); - let mut opt_closing_paren = map(opt(ws(char(')'))), |o| o.is_some()); - let mut opt_opening_brace = map(opt(ws(char('{'))), |o| o.is_some()); - - let (i, lit) = opt(variant_lit)(i)?; - if let Some(lit) = lit { - return Ok((i, lit)); - } - - // match tuples and unused parentheses - let (i, target_is_tuple) = opt_opening_paren(i)?; - if target_is_tuple { - let (i, is_empty_tuple) = opt_closing_paren(i)?; - if is_empty_tuple { - return Ok((i, Target::Tuple(Vec::new(), Vec::new()))); - } - - let (i, first_target) = target(i)?; - let (i, is_unused_paren) = opt_closing_paren(i)?; - if is_unused_paren { - return Ok((i, first_target)); - } - - let mut targets = vec![first_target]; - let (i, _) = cut(tuple(( - fold_many0( - preceded(ws(char(',')), target), - || (), - |_, target| { - targets.push(target); - }, - ), - opt(ws(char(','))), - ws(cut(char(')'))), - )))(i)?; - return Ok((i, Target::Tuple(Vec::new(), targets))); - } - - // match structs - let (i, path) = opt(path)(i)?; - if let Some(path) = path { - let i_before_matching_with = i; - let (i, _) = opt(ws(tag("with")))(i)?; - - let (i, is_unnamed_struct) = opt_opening_paren(i)?; - if is_unnamed_struct { - let (i, targets) = alt(( - map(char(')'), |_| Vec::new()), - terminated( - cut(separated_list1(ws(char(',')), target)), - pair(opt(ws(char(','))), ws(cut(char(')')))), - ), - ))(i)?; - return Ok((i, Target::Tuple(path, targets))); - } - - let (i, is_named_struct) = opt_opening_brace(i)?; - if is_named_struct { - let (i, targets) = alt(( - map(char('}'), |_| Vec::new()), - terminated( - cut(separated_list1(ws(char(',')), named_target)), - pair(opt(ws(char(','))), ws(cut(char('}')))), - ), - ))(i)?; - return Ok((i, Target::Struct(path, targets))); - } - - return Ok((i_before_matching_with, Target::Path(path))); - } - - // neither literal nor struct nor path - map(identifier, Target::Name)(i) -} - -fn arguments(i: &str) -> IResult<&str, Vec>> { - delimited( - ws(char('(')), - separated_list0(char(','), ws(expr_any)), - ws(char(')')), - )(i) -} - -fn macro_arguments(i: &str) -> IResult<&str, &str> { - delimited(char('('), recognize(nested_parenthesis), char(')'))(i) -} - -fn nested_parenthesis(i: &str) -> IResult<&str, ()> { - let mut nested = 0; - let mut last = 0; - let mut in_str = false; - let mut escaped = false; - - for (i, b) in i.chars().enumerate() { - if !(b == '(' || b == ')') || !in_str { - match b { - '(' => nested += 1, - ')' => { - if nested == 0 { - last = i; - break; - } - nested -= 1; - } - '"' => { - if in_str { - if !escaped { - in_str = false; - } - } else { - in_str = true; - } - } - '\\' => { - escaped = !escaped; - } - _ => (), - } - } - - if escaped && b != '\\' { - escaped = false; - } - } - - if nested == 0 { - Ok((&i[last..], ())) - } else { - 
Err(nom::Err::Error(error_position!( - i, - ErrorKind::SeparatedNonEmptyList - ))) - } -} - -fn parameters(i: &str) -> IResult<&str, Vec<&str>> { - delimited( - ws(char('(')), - separated_list0(char(','), ws(identifier)), - ws(char(')')), - )(i) -} - -fn expr_group(i: &str) -> IResult<&str, Expr<'_>> { - let (i, expr) = preceded(ws(char('(')), opt(expr_any))(i)?; - let expr = match expr { - Some(expr) => expr, - None => { - let (i, _) = char(')')(i)?; - return Ok((i, Expr::Tuple(vec![]))); - } - }; - - let (i, comma) = ws(opt(peek(char(','))))(i)?; - if comma.is_none() { - let (i, _) = char(')')(i)?; - return Ok((i, Expr::Group(Box::new(expr)))); - } - - let mut exprs = vec![expr]; - let (i, _) = fold_many0( - preceded(char(','), ws(expr_any)), - || (), - |_, expr| { - exprs.push(expr); - }, - )(i)?; - let (i, _) = pair(ws(opt(char(','))), char(')'))(i)?; - Ok((i, Expr::Tuple(exprs))) -} - -fn expr_single(i: &str) -> IResult<&str, Expr<'_>> { - alt(( - expr_bool_lit, - expr_num_lit, - expr_str_lit, - expr_char_lit, - expr_path, - expr_rust_macro, - expr_array_lit, - expr_var, - expr_group, - ))(i) -} - -enum Suffix<'a> { - Attr(&'a str), - Index(Expr<'a>), - Call(Vec>), - Try, -} - -fn expr_attr(i: &str) -> IResult<&str, Suffix<'_>> { - map( - preceded( - ws(pair(char('.'), not(char('.')))), - cut(alt((num_lit, identifier))), - ), - Suffix::Attr, - )(i) -} - -fn expr_index(i: &str) -> IResult<&str, Suffix<'_>> { - map( - preceded(ws(char('[')), cut(terminated(expr_any, ws(char(']'))))), - Suffix::Index, - )(i) -} - -fn expr_call(i: &str) -> IResult<&str, Suffix<'_>> { - map(arguments, Suffix::Call)(i) -} - -fn expr_try(i: &str) -> IResult<&str, Suffix<'_>> { - map(preceded(take_till(not_ws), char('?')), |_| Suffix::Try)(i) -} - -fn filter(i: &str) -> IResult<&str, (&str, Option>>)> { - let (i, (_, fname, args)) = tuple((char('|'), ws(identifier), opt(arguments)))(i)?; - Ok((i, (fname, args))) -} - -fn expr_filtered(i: &str) -> IResult<&str, Expr<'_>> { - let (i, (obj, filters)) = tuple((expr_prefix, many0(filter)))(i)?; - - let mut res = obj; - for (fname, args) in filters { - res = Expr::Filter(fname, { - let mut args = match args { - Some(inner) => inner, - None => Vec::new(), - }; - args.insert(0, res); - args - }); - } - - Ok((i, res)) -} - -fn expr_prefix(i: &str) -> IResult<&str, Expr<'_>> { - let (i, (ops, mut expr)) = pair(many0(ws(alt((tag("!"), tag("-"))))), expr_suffix)(i)?; - for op in ops.iter().rev() { - expr = Expr::Unary(op, Box::new(expr)); - } - Ok((i, expr)) -} - -fn expr_suffix(i: &str) -> IResult<&str, Expr<'_>> { - let (mut i, mut expr) = expr_single(i)?; - loop { - let (j, suffix) = opt(alt((expr_attr, expr_index, expr_call, expr_try)))(i)?; - i = j; - match suffix { - Some(Suffix::Attr(attr)) => expr = Expr::Attr(expr.into(), attr), - Some(Suffix::Index(index)) => expr = Expr::Index(expr.into(), index.into()), - Some(Suffix::Call(args)) => expr = Expr::Call(expr.into(), args), - Some(Suffix::Try) => expr = Expr::Try(expr.into()), - None => break, - } - } - Ok((i, expr)) -} - -fn expr_rust_macro(i: &str) -> IResult<&str, Expr<'_>> { - let (i, (mname, _, args)) = tuple((identifier, char('!'), macro_arguments))(i)?; - Ok((i, Expr::RustMacro(mname, args))) -} - -macro_rules! 
expr_prec_layer { - ( $name:ident, $inner:ident, $op:expr ) => { - fn $name(i: &str) -> IResult<&str, Expr<'_>> { - let (i, left) = $inner(i)?; - let (i, right) = many0(pair( - ws(tag($op)), - $inner, - ))(i)?; - Ok(( - i, - right.into_iter().fold(left, |left, (op, right)| { - Expr::BinOp(op, Box::new(left), Box::new(right)) - }), - )) - } - }; - ( $name:ident, $inner:ident, $( $op:expr ),+ ) => { - fn $name(i: &str) -> IResult<&str, Expr<'_>> { - let (i, left) = $inner(i)?; - let (i, right) = many0(pair( - ws(alt(($( tag($op) ),+,))), - $inner, - ))(i)?; - Ok(( - i, - right.into_iter().fold(left, |left, (op, right)| { - Expr::BinOp(op, Box::new(left), Box::new(right)) - }), - )) - } - } -} - -expr_prec_layer!(expr_muldivmod, expr_filtered, "*", "/", "%"); -expr_prec_layer!(expr_addsub, expr_muldivmod, "+", "-"); -expr_prec_layer!(expr_shifts, expr_addsub, ">>", "<<"); -expr_prec_layer!(expr_band, expr_shifts, "&"); -expr_prec_layer!(expr_bxor, expr_band, "^"); -expr_prec_layer!(expr_bor, expr_bxor, "|"); -expr_prec_layer!(expr_compare, expr_bor, "==", "!=", ">=", ">", "<=", "<"); -expr_prec_layer!(expr_and, expr_compare, "&&"); -expr_prec_layer!(expr_or, expr_and, "||"); - -fn expr_handle_ws(i: &str) -> IResult<&str, Whitespace> { - alt((char('-'), char('+'), char('~')))(i).map(|(s, r)| (s, Whitespace::from(r))) -} - -fn expr_any(i: &str) -> IResult<&str, Expr<'_>> { - let range_right = |i| pair(ws(alt((tag("..="), tag("..")))), opt(expr_or))(i); - alt(( - map(range_right, |(op, right)| { - Expr::Range(op, None, right.map(Box::new)) - }), - map( - pair(expr_or, opt(range_right)), - |(left, right)| match right { - Some((op, right)) => Expr::Range(op, Some(Box::new(left)), right.map(Box::new)), - None => left, - }, - ), - ))(i) -} - -fn expr_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut p = tuple(( - |i| tag_expr_start(i, s), - cut(tuple(( - opt(expr_handle_ws), - ws(expr_any), - opt(expr_handle_ws), - |i| tag_expr_end(i, s), - ))), - )); - let (i, (_, (pws, expr, nws, _))) = p(i)?; - Ok((i, Node::Expr(Ws(pws, nws), expr))) -} - -fn block_call(i: &str) -> IResult<&str, Node<'_>> { - let mut p = tuple(( - opt(expr_handle_ws), - ws(tag("call")), - cut(tuple(( - opt(tuple((ws(identifier), ws(tag("::"))))), - ws(identifier), - ws(arguments), - opt(expr_handle_ws), - ))), - )); - let (i, (pws, _, (scope, name, args, nws))) = p(i)?; - let scope = scope.map(|(scope, _)| scope); - Ok((i, Node::Call(Ws(pws, nws), scope, name, args))) -} - -fn cond_if(i: &str) -> IResult<&str, CondTest<'_>> { - let mut p = preceded( - ws(tag("if")), - cut(tuple(( - opt(delimited( - ws(alt((tag("let"), tag("set")))), - ws(target), - ws(char('=')), - )), - ws(expr_any), - ))), - ); - let (i, (target, expr)) = p(i)?; - Ok((i, CondTest { target, expr })) -} - -fn cond_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Cond<'a>> { - let mut p = tuple(( - |i| tag_block_start(i, s), - opt(expr_handle_ws), - ws(tag("else")), - cut(tuple(( - opt(cond_if), - opt(expr_handle_ws), - |i| tag_block_end(i, s), - cut(|i| parse_template(i, s)), - ))), - )); - let (i, (_, pws, _, (cond, nws, _, block))) = p(i)?; - Ok((i, (Ws(pws, nws), cond, block))) -} - -fn block_if<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut p = tuple(( - opt(expr_handle_ws), - cond_if, - cut(tuple(( - opt(expr_handle_ws), - |i| tag_block_end(i, s), - cut(tuple(( - |i| parse_template(i, s), - many0(|i| cond_block(i, s)), - cut(tuple(( - |i| tag_block_start(i, s), - opt(expr_handle_ws), - 
ws(tag("endif")), - opt(expr_handle_ws), - ))), - ))), - ))), - )); - let (i, (pws1, cond, (nws1, _, (block, elifs, (_, pws2, _, nws2))))) = p(i)?; - - let mut res = vec![(Ws(pws1, nws1), Some(cond), block)]; - res.extend(elifs); - Ok((i, Node::Cond(res, Ws(pws2, nws2)))) -} - -fn match_else_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> { - let mut p = tuple(( - |i| tag_block_start(i, s), - opt(expr_handle_ws), - ws(tag("else")), - cut(tuple(( - opt(expr_handle_ws), - |i| tag_block_end(i, s), - cut(|i| parse_template(i, s)), - ))), - )); - let (i, (_, pws, _, (nws, _, block))) = p(i)?; - Ok((i, (Ws(pws, nws), Target::Name("_"), block))) -} - -fn when_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> { - let mut p = tuple(( - |i| tag_block_start(i, s), - opt(expr_handle_ws), - ws(tag("when")), - cut(tuple(( - ws(target), - opt(expr_handle_ws), - |i| tag_block_end(i, s), - cut(|i| parse_template(i, s)), - ))), - )); - let (i, (_, pws, _, (target, nws, _, block))) = p(i)?; - Ok((i, (Ws(pws, nws), target, block))) -} - -fn block_match<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut p = tuple(( - opt(expr_handle_ws), - ws(tag("match")), - cut(tuple(( - ws(expr_any), - opt(expr_handle_ws), - |i| tag_block_end(i, s), - cut(tuple(( - ws(many0(ws(value((), |i| block_comment(i, s))))), - many1(|i| when_block(i, s)), - cut(tuple(( - opt(|i| match_else_block(i, s)), - cut(tuple(( - ws(|i| tag_block_start(i, s)), - opt(expr_handle_ws), - ws(tag("endmatch")), - opt(expr_handle_ws), - ))), - ))), - ))), - ))), - )); - let (i, (pws1, _, (expr, nws1, _, (_, arms, (else_arm, (_, pws2, _, nws2)))))) = p(i)?; - - let mut arms = arms; - if let Some(arm) = else_arm { - arms.push(arm); - } - - Ok((i, Node::Match(Ws(pws1, nws1), expr, arms, Ws(pws2, nws2)))) -} - -fn block_let(i: &str) -> IResult<&str, Node<'_>> { - let mut p = tuple(( - opt(expr_handle_ws), - ws(alt((tag("let"), tag("set")))), - cut(tuple(( - ws(target), - opt(tuple((ws(char('=')), ws(expr_any)))), - opt(expr_handle_ws), - ))), - )); - let (i, (pws, _, (var, val, nws))) = p(i)?; - - Ok(( - i, - if let Some((_, val)) = val { - Node::Let(Ws(pws, nws), var, val) - } else { - Node::LetDecl(Ws(pws, nws), var) - }, - )) -} - -fn parse_loop_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec>> { - s.loop_depth.set(s.loop_depth.get() + 1); - let result = parse_template(i, s); - s.loop_depth.set(s.loop_depth.get() - 1); - result -} - -fn block_for<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let if_cond = preceded(ws(tag("if")), cut(ws(expr_any))); - let else_block = |i| { - let mut p = preceded( - ws(tag("else")), - cut(tuple(( - opt(expr_handle_ws), - delimited( - |i| tag_block_end(i, s), - |i| parse_template(i, s), - |i| tag_block_start(i, s), - ), - opt(expr_handle_ws), - ))), - ); - let (i, (pws, nodes, nws)) = p(i)?; - Ok((i, (pws, nodes, nws))) - }; - let mut p = tuple(( - opt(expr_handle_ws), - ws(tag("for")), - cut(tuple(( - ws(target), - ws(tag("in")), - cut(tuple(( - ws(expr_any), - opt(if_cond), - opt(expr_handle_ws), - |i| tag_block_end(i, s), - cut(tuple(( - |i| parse_loop_content(i, s), - cut(tuple(( - |i| tag_block_start(i, s), - opt(expr_handle_ws), - opt(else_block), - ws(tag("endfor")), - opt(expr_handle_ws), - ))), - ))), - ))), - ))), - )); - let (i, (pws1, _, (var, _, (iter, cond, nws1, _, (body, (_, pws2, else_block, _, nws2)))))) = - p(i)?; - let (nws3, else_block, pws3) = else_block.unwrap_or_default(); - Ok(( - i, - Node::Loop(Loop { - 
ws1: Ws(pws1, nws1), - var, - iter, - cond, - body, - ws2: Ws(pws2, nws3), - else_block, - ws3: Ws(pws3, nws2), - }), - )) -} - -fn block_extends(i: &str) -> IResult<&str, Node<'_>> { - let (i, (_, name)) = tuple((ws(tag("extends")), ws(expr_str_lit)))(i)?; - Ok((i, Node::Extends(name))) -} - -fn block_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut start = tuple(( - opt(expr_handle_ws), - ws(tag("block")), - cut(tuple((ws(identifier), opt(expr_handle_ws), |i| { - tag_block_end(i, s) - }))), - )); - let (i, (pws1, _, (name, nws1, _))) = start(i)?; - - let mut end = cut(tuple(( - |i| parse_template(i, s), - cut(tuple(( - |i| tag_block_start(i, s), - opt(expr_handle_ws), - ws(tag("endblock")), - cut(tuple((opt(ws(tag(name))), opt(expr_handle_ws)))), - ))), - ))); - let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?; - - Ok(( - i, - Node::BlockDef(Ws(pws1, nws1), name, contents, Ws(pws2, nws2)), - )) -} - -fn block_include(i: &str) -> IResult<&str, Node<'_>> { - let mut p = tuple(( - opt(expr_handle_ws), - ws(tag("include")), - cut(pair(ws(str_lit), opt(expr_handle_ws))), - )); - let (i, (pws, _, (name, nws))) = p(i)?; - Ok((i, Node::Include(Ws(pws, nws), name))) -} - -fn block_import(i: &str) -> IResult<&str, Node<'_>> { - let mut p = tuple(( - opt(expr_handle_ws), - ws(tag("import")), - cut(tuple(( - ws(str_lit), - ws(tag("as")), - cut(pair(ws(identifier), opt(expr_handle_ws))), - ))), - )); - let (i, (pws, _, (name, _, (scope, nws)))) = p(i)?; - Ok((i, Node::Import(Ws(pws, nws), name, scope))) -} - -fn block_macro<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut start = tuple(( - opt(expr_handle_ws), - ws(tag("macro")), - cut(tuple(( - ws(identifier), - ws(parameters), - opt(expr_handle_ws), - |i| tag_block_end(i, s), - ))), - )); - let (i, (pws1, _, (name, params, nws1, _))) = start(i)?; - - let mut end = cut(tuple(( - |i| parse_template(i, s), - cut(tuple(( - |i| tag_block_start(i, s), - opt(expr_handle_ws), - ws(tag("endmacro")), - cut(tuple((opt(ws(tag(name))), opt(expr_handle_ws)))), - ))), - ))); - let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?; - - assert_ne!(name, "super", "invalid macro name 'super'"); - - Ok(( - i, - Node::Macro( - name, - Macro { - ws1: Ws(pws1, nws1), - args: params, - nodes: contents, - ws2: Ws(pws2, nws2), - }, - ), - )) -} - -fn block_raw<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let endraw = tuple(( - |i| tag_block_start(i, s), - opt(expr_handle_ws), - ws(tag("endraw")), - opt(expr_handle_ws), - peek(|i| tag_block_end(i, s)), - )); - - let mut p = tuple(( - opt(expr_handle_ws), - ws(tag("raw")), - cut(tuple(( - opt(expr_handle_ws), - |i| tag_block_end(i, s), - consumed(skip_till(endraw)), - ))), - )); - - let (_, (pws1, _, (nws1, _, (contents, (i, (_, pws2, _, nws2, _)))))) = p(i)?; - let (lws, val, rws) = match split_ws_parts(contents) { - Node::Lit(lws, val, rws) => (lws, val, rws), - _ => unreachable!(), - }; - let ws1 = Ws(pws1, nws1); - let ws2 = Ws(pws2, nws2); - Ok((i, Node::Raw(ws1, lws, val, rws, ws2))) -} - -fn break_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut p = tuple((opt(expr_handle_ws), ws(tag("break")), opt(expr_handle_ws))); - let (j, (pws, _, nws)) = p(i)?; - if s.loop_depth.get() == 0 { - return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))); - } - Ok((j, Node::Break(Ws(pws, nws)))) -} - -fn continue_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut p = tuple(( - 
opt(expr_handle_ws), - ws(tag("continue")), - opt(expr_handle_ws), - )); - let (j, (pws, _, nws)) = p(i)?; - if s.loop_depth.get() == 0 { - return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))); - } - Ok((j, Node::Continue(Ws(pws, nws)))) -} - -fn block_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut p = tuple(( - |i| tag_block_start(i, s), - alt(( - block_call, - block_let, - |i| block_if(i, s), - |i| block_for(i, s), - |i| block_match(i, s), - block_extends, - block_include, - block_import, - |i| block_block(i, s), - |i| block_macro(i, s), - |i| block_raw(i, s), - |i| break_statement(i, s), - |i| continue_statement(i, s), - )), - cut(|i| tag_block_end(i, s)), - )); - let (i, (_, contents, _)) = p(i)?; - Ok((i, contents)) -} - -fn block_comment_body<'a>(mut i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - let mut level = 0; - loop { - let (end, tail) = take_until(s.syntax.comment_end)(i)?; - match take_until::<_, _, Error<_>>(s.syntax.comment_start)(i) { - Ok((start, _)) if start.as_ptr() < end.as_ptr() => { - level += 1; - i = &start[2..]; - } - _ if level > 0 => { - level -= 1; - i = &end[2..]; - } - _ => return Ok((end, tail)), - } - } -} - -fn block_comment<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> { - let mut p = tuple(( - |i| tag_comment_start(i, s), - cut(tuple(( - opt(expr_handle_ws), - |i| block_comment_body(i, s), - |i| tag_comment_end(i, s), - ))), - )); - let (i, (_, (pws, tail, _))) = p(i)?; - let nws = if tail.ends_with('-') { - Some(Whitespace::Suppress) - } else if tail.ends_with('+') { - Some(Whitespace::Preserve) - } else if tail.ends_with('~') { - Some(Whitespace::Minimize) - } else { - None - }; - Ok((i, Node::Comment(Ws(pws, nws)))) -} - -fn parse_template<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec>> { - many0(alt(( - complete(|i| take_content(i, s)), - complete(|i| block_comment(i, s)), - complete(|i| expr_node(i, s)), - complete(|i| block_node(i, s)), - )))(i) -} - -fn tag_block_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.block_start)(i) -} -fn tag_block_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.block_end)(i) -} -fn tag_comment_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.comment_start)(i) -} -fn tag_comment_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.comment_end)(i) -} -fn tag_expr_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.expr_start)(i) -} -fn tag_expr_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> { - tag(s.syntax.expr_end)(i) -} - -pub(crate) fn parse<'a>( - src: &'a str, - syntax: &'a Syntax<'a>, -) -> Result>, CompileError> { - let state = State { - syntax, - loop_depth: Cell::new(0), - }; - match parse_template(src, &state) { - Ok((left, res)) => { - if !left.is_empty() { - Err(format!("unable to parse template:\n\n{:?}", left).into()) - } else { - Ok(res) - } - } - - Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => { - let nom::error::Error { input, .. 
} = err; - let offset = src.len() - input.len(); - let (source_before, source_after) = src.split_at(offset); - - let source_after = match source_after.char_indices().enumerate().take(41).last() { - Some((40, (i, _))) => format!("{:?}...", &source_after[..i]), - _ => format!("{:?}", source_after), - }; - - let (row, last_line) = source_before.lines().enumerate().last().unwrap(); - let column = last_line.chars().count(); - - let msg = format!( - "problems parsing template source at row {}, column {} near:\n{}", - row + 1, - column, - source_after, - ); - Err(msg.into()) - } - - Err(nom::Err::Incomplete(_)) => Err("parsing incomplete".into()), - } -} - -#[cfg(test)] -mod tests { - use super::{Expr, Node, Whitespace, Ws}; - use crate::config::Syntax; - - fn check_ws_split(s: &str, res: &(&str, &str, &str)) { - match super::split_ws_parts(s) { - Node::Lit(lws, s, rws) => { - assert_eq!(lws, res.0); - assert_eq!(s, res.1); - assert_eq!(rws, res.2); - } - _ => { - panic!("fail"); - } - } - } - - #[test] - fn test_ws_splitter() { - check_ws_split("", &("", "", "")); - check_ws_split("a", &("", "a", "")); - check_ws_split("\ta", &("\t", "a", "")); - check_ws_split("b\n", &("", "b", "\n")); - check_ws_split(" \t\r\n", &(" \t\r\n", "", "")); - } - - #[test] - #[should_panic] - fn test_invalid_block() { - super::parse("{% extend \"blah\" %}", &Syntax::default()).unwrap(); - } - - #[test] - fn test_parse_filter() { - use Expr::*; - let syntax = Syntax::default(); - assert_eq!( - super::parse("{{ strvar|e }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Filter("e", vec![Var("strvar")]),)], - ); - assert_eq!( - super::parse("{{ 2|abs }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Filter("abs", vec![NumLit("2")]),)], - ); - assert_eq!( - super::parse("{{ -2|abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Filter("abs", vec![Unary("-", NumLit("2").into())]), - )], - ); - assert_eq!( - super::parse("{{ (1 - 2)|abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Filter( - "abs", - vec![Group( - BinOp("-", NumLit("1").into(), NumLit("2").into()).into() - )] - ), - )], - ); - } - - #[test] - fn test_parse_numbers() { - let syntax = Syntax::default(); - assert_eq!( - super::parse("{{ 2 }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::NumLit("2"),)], - ); - assert_eq!( - super::parse("{{ 2.5 }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::NumLit("2.5"),)], - ); - } - - #[test] - fn test_parse_var() { - let s = Syntax::default(); - - assert_eq!( - super::parse("{{ foo }}", &s).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::Var("foo"))], - ); - assert_eq!( - super::parse("{{ foo_bar }}", &s).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::Var("foo_bar"))], - ); - - assert_eq!( - super::parse("{{ none }}", &s).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::Var("none"))], - ); - } - - #[test] - fn test_parse_const() { - let s = Syntax::default(); - - assert_eq!( - super::parse("{{ FOO }}", &s).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO"]))], - ); - assert_eq!( - super::parse("{{ FOO_BAR }}", &s).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO_BAR"]))], - ); - - assert_eq!( - super::parse("{{ NONE }}", &s).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::Path(vec!["NONE"]))], - ); - } - - #[test] - fn test_parse_path() { - let s = Syntax::default(); - - assert_eq!( - super::parse("{{ None }}", &s).unwrap(), - vec![Node::Expr(Ws(None, None), Expr::Path(vec!["None"]))], - ); - assert_eq!( 
- super::parse("{{ Some(123) }}", &s).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Call( - Box::new(Expr::Path(vec!["Some"])), - vec![Expr::NumLit("123")] - ), - )], - ); - - assert_eq!( - super::parse("{{ Ok(123) }}", &s).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Call(Box::new(Expr::Path(vec!["Ok"])), vec![Expr::NumLit("123")]), - )], - ); - assert_eq!( - super::parse("{{ Err(123) }}", &s).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Call(Box::new(Expr::Path(vec!["Err"])), vec![Expr::NumLit("123")]), - )], - ); - } - - #[test] - fn test_parse_var_call() { - assert_eq!( - super::parse("{{ function(\"123\", 3) }}", &Syntax::default()).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Call( - Box::new(Expr::Var("function")), - vec![Expr::StrLit("123"), Expr::NumLit("3")] - ), - )], - ); - } - - #[test] - fn test_parse_path_call() { - let s = Syntax::default(); - - assert_eq!( - super::parse("{{ Option::None }}", &s).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Path(vec!["Option", "None"]) - )], - ); - assert_eq!( - super::parse("{{ Option::Some(123) }}", &s).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Call( - Box::new(Expr::Path(vec!["Option", "Some"])), - vec![Expr::NumLit("123")], - ), - )], - ); - - assert_eq!( - super::parse("{{ self::function(\"123\", 3) }}", &s).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Call( - Box::new(Expr::Path(vec!["self", "function"])), - vec![Expr::StrLit("123"), Expr::NumLit("3")], - ), - )], - ); - } - - #[test] - fn test_parse_root_path() { - let syntax = Syntax::default(); - assert_eq!( - super::parse("{{ std::string::String::new() }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Call( - Box::new(Expr::Path(vec!["std", "string", "String", "new"])), - vec![] - ), - )], - ); - assert_eq!( - super::parse("{{ ::std::string::String::new() }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Expr::Call( - Box::new(Expr::Path(vec!["", "std", "string", "String", "new"])), - vec![] - ), - )], - ); - } - - #[test] - fn change_delimiters_parse_filter() { - let syntax = Syntax { - expr_start: "{=", - expr_end: "=}", - ..Syntax::default() - }; - - super::parse("{= strvar|e =}", &syntax).unwrap(); - } - - #[test] - fn test_precedence() { - use Expr::*; - let syntax = Syntax::default(); - assert_eq!( - super::parse("{{ a + b == c }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "==", - BinOp("+", Var("a").into(), Var("b").into()).into(), - Var("c").into(), - ) - )], - ); - assert_eq!( - super::parse("{{ a + b * c - d / e }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "-", - BinOp( - "+", - Var("a").into(), - BinOp("*", Var("b").into(), Var("c").into()).into(), - ) - .into(), - BinOp("/", Var("d").into(), Var("e").into()).into(), - ) - )], - ); - assert_eq!( - super::parse("{{ a * (b + c) / -d }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "/", - BinOp( - "*", - Var("a").into(), - Group(BinOp("+", Var("b").into(), Var("c").into()).into()).into() - ) - .into(), - Unary("-", Var("d").into()).into() - ) - )], - ); - assert_eq!( - super::parse("{{ a || b && c || d && e }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "||", - BinOp( - "||", - Var("a").into(), - BinOp("&&", Var("b").into(), Var("c").into()).into(), - ) - .into(), - BinOp("&&", Var("d").into(), Var("e").into()).into(), - ) - )], - ); - } - - #[test] - fn test_associativity() { - use Expr::*; - let syntax = 
Syntax::default(); - assert_eq!( - super::parse("{{ a + b + c }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "+", - BinOp("+", Var("a").into(), Var("b").into()).into(), - Var("c").into() - ) - )], - ); - assert_eq!( - super::parse("{{ a * b * c }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "*", - BinOp("*", Var("a").into(), Var("b").into()).into(), - Var("c").into() - ) - )], - ); - assert_eq!( - super::parse("{{ a && b && c }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "&&", - BinOp("&&", Var("a").into(), Var("b").into()).into(), - Var("c").into() - ) - )], - ); - assert_eq!( - super::parse("{{ a + b - c + d }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "+", - BinOp( - "-", - BinOp("+", Var("a").into(), Var("b").into()).into(), - Var("c").into() - ) - .into(), - Var("d").into() - ) - )], - ); - assert_eq!( - super::parse("{{ a == b != c > d > e == f }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "==", - BinOp( - ">", - BinOp( - ">", - BinOp( - "!=", - BinOp("==", Var("a").into(), Var("b").into()).into(), - Var("c").into() - ) - .into(), - Var("d").into() - ) - .into(), - Var("e").into() - ) - .into(), - Var("f").into() - ) - )], - ); - } - - #[test] - fn test_odd_calls() { - use Expr::*; - let syntax = Syntax::default(); - assert_eq!( - super::parse("{{ a[b](c) }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Call( - Box::new(Index(Box::new(Var("a")), Box::new(Var("b")))), - vec![Var("c")], - ), - )], - ); - assert_eq!( - super::parse("{{ (a + b)(c) }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Call( - Box::new(Group(Box::new(BinOp( - "+", - Box::new(Var("a")), - Box::new(Var("b")) - )))), - vec![Var("c")], - ), - )], - ); - assert_eq!( - super::parse("{{ a + b(c) }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "+", - Box::new(Var("a")), - Box::new(Call(Box::new(Var("b")), vec![Var("c")])), - ), - )], - ); - assert_eq!( - super::parse("{{ (-a)(b) }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Call( - Box::new(Group(Box::new(Unary("-", Box::new(Var("a")))))), - vec![Var("b")], - ), - )], - ); - assert_eq!( - super::parse("{{ -a(b) }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Unary("-", Box::new(Call(Box::new(Var("a")), vec![Var("b")])),), - )], - ); - } - - #[test] - fn test_parse_comments() { - let s = &Syntax::default(); - - assert_eq!( - super::parse("{##}", s).unwrap(), - vec![Node::Comment(Ws(None, None))], - ); - assert_eq!( - super::parse("{#- #}", s).unwrap(), - vec![Node::Comment(Ws(Some(Whitespace::Suppress), None))], - ); - assert_eq!( - super::parse("{# -#}", s).unwrap(), - vec![Node::Comment(Ws(None, Some(Whitespace::Suppress)))], - ); - assert_eq!( - super::parse("{#--#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Suppress), - Some(Whitespace::Suppress) - ))], - ); - assert_eq!( - super::parse("{#- foo\n bar -#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Suppress), - Some(Whitespace::Suppress) - ))], - ); - assert_eq!( - super::parse("{#- foo\n {#- bar\n -#} baz -#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Suppress), - Some(Whitespace::Suppress) - ))], - ); - assert_eq!( - super::parse("{#+ #}", s).unwrap(), - vec![Node::Comment(Ws(Some(Whitespace::Preserve), None))], - ); - assert_eq!( - super::parse("{# +#}", s).unwrap(), - vec![Node::Comment(Ws(None, Some(Whitespace::Preserve)))], - ); - assert_eq!( - 
super::parse("{#++#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Preserve), - Some(Whitespace::Preserve) - ))], - ); - assert_eq!( - super::parse("{#+ foo\n bar +#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Preserve), - Some(Whitespace::Preserve) - ))], - ); - assert_eq!( - super::parse("{#+ foo\n {#+ bar\n +#} baz -+#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Preserve), - Some(Whitespace::Preserve) - ))], - ); - assert_eq!( - super::parse("{#~ #}", s).unwrap(), - vec![Node::Comment(Ws(Some(Whitespace::Minimize), None))], - ); - assert_eq!( - super::parse("{# ~#}", s).unwrap(), - vec![Node::Comment(Ws(None, Some(Whitespace::Minimize)))], - ); - assert_eq!( - super::parse("{#~~#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Minimize), - Some(Whitespace::Minimize) - ))], - ); - assert_eq!( - super::parse("{#~ foo\n bar ~#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Minimize), - Some(Whitespace::Minimize) - ))], - ); - assert_eq!( - super::parse("{#~ foo\n {#~ bar\n ~#} baz -~#}", s).unwrap(), - vec![Node::Comment(Ws( - Some(Whitespace::Minimize), - Some(Whitespace::Minimize) - ))], - ); - - assert_eq!( - super::parse("{# foo {# bar #} {# {# baz #} qux #} #}", s).unwrap(), - vec![Node::Comment(Ws(None, None))], - ); - } - - #[test] - fn test_parse_tuple() { - use super::Expr::*; - let syntax = Syntax::default(); - assert_eq!( - super::parse("{{ () }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Tuple(vec![]),)], - ); - assert_eq!( - super::parse("{{ (1) }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Group(Box::new(NumLit("1"))),)], - ); - assert_eq!( - super::parse("{{ (1,) }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)], - ); - assert_eq!( - super::parse("{{ (1, ) }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)], - ); - assert_eq!( - super::parse("{{ (1 ,) }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)], - ); - assert_eq!( - super::parse("{{ (1 , ) }}", &syntax).unwrap(), - vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)], - ); - assert_eq!( - super::parse("{{ (1, 2) }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Tuple(vec![NumLit("1"), NumLit("2")]), - )], - ); - assert_eq!( - super::parse("{{ (1, 2,) }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Tuple(vec![NumLit("1"), NumLit("2")]), - )], - ); - assert_eq!( - super::parse("{{ (1, 2, 3) }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Tuple(vec![NumLit("1"), NumLit("2"), NumLit("3")]), - )], - ); - assert_eq!( - super::parse("{{ ()|abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Filter("abs", vec![Tuple(vec![])]), - )], - ); - assert_eq!( - super::parse("{{ () | abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp("|", Box::new(Tuple(vec![])), Box::new(Var("abs"))), - )], - ); - assert_eq!( - super::parse("{{ (1)|abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Filter("abs", vec![Group(Box::new(NumLit("1")))]), - )], - ); - assert_eq!( - super::parse("{{ (1) | abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "|", - Box::new(Group(Box::new(NumLit("1")))), - Box::new(Var("abs")) - ), - )], - ); - assert_eq!( - super::parse("{{ (1,)|abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Filter("abs", vec![Tuple(vec![NumLit("1")])]), - )], - ); - assert_eq!( - super::parse("{{ (1,) 
| abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "|", - Box::new(Tuple(vec![NumLit("1")])), - Box::new(Var("abs")) - ), - )], - ); - assert_eq!( - super::parse("{{ (1, 2)|abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - Filter("abs", vec![Tuple(vec![NumLit("1"), NumLit("2")])]), - )], - ); - assert_eq!( - super::parse("{{ (1, 2) | abs }}", &syntax).unwrap(), - vec![Node::Expr( - Ws(None, None), - BinOp( - "|", - Box::new(Tuple(vec![NumLit("1"), NumLit("2")])), - Box::new(Var("abs")) - ), - )], - ); - } -} diff --git a/askama_shared/templates/a.html b/askama_shared/templates/a.html deleted file mode 100644 index 257cc56..0000000 --- a/askama_shared/templates/a.html +++ /dev/null @@ -1 +0,0 @@ -foo diff --git a/askama_shared/templates/b.html b/askama_shared/templates/b.html deleted file mode 100644 index 5716ca5..0000000 --- a/askama_shared/templates/b.html +++ /dev/null @@ -1 +0,0 @@ -bar diff --git a/askama_shared/templates/sub/b.html b/askama_shared/templates/sub/b.html deleted file mode 100644 index 5716ca5..0000000 --- a/askama_shared/templates/sub/b.html +++ /dev/null @@ -1 +0,0 @@ -bar diff --git a/askama_shared/templates/sub/c.html b/askama_shared/templates/sub/c.html deleted file mode 100644 index 7601807..0000000 --- a/askama_shared/templates/sub/c.html +++ /dev/null @@ -1 +0,0 @@ -baz diff --git a/askama_shared/templates/sub/sub1/d.html b/askama_shared/templates/sub/sub1/d.html deleted file mode 100644 index fa11a6a..0000000 --- a/askama_shared/templates/sub/sub1/d.html +++ /dev/null @@ -1 +0,0 @@ -echo -- cgit