From 8ffed1822bcbc1b6ce6647b840fb03996b0635ea Mon Sep 17 00:00:00 2001 From: Titus Wormer Date: Tue, 2 Aug 2022 14:27:31 +0200 Subject: Refactor to remove most closures --- src/construct/html_flow.rs | 419 +++++++++++++++++++++++---------------- 1 file changed, 210 insertions(+), 209 deletions(-) (limited to 'src/construct/html_flow.rs') diff --git a/src/construct/html_flow.rs b/src/construct/html_flow.rs index 064da35..aaa803d 100644 --- a/src/construct/html_flow.rs +++ b/src/construct/html_flow.rs @@ -1,4 +1,4 @@ -//! HTML (flow) is a construct that occurs in the [flow][] content type. //! //! It forms with the following BNF: //! @@ -110,37 +110,20 @@ use crate::token::Token; use crate::tokenizer::{State, Tokenizer}; use crate::util::slice::Slice; -/// Kind of HTML (flow). -#[derive(Debug, PartialEq)] -enum Kind { - /// Symbol for ` /// ^ /// ``` -fn continuation_raw_tag_open(tokenizer: &mut Tokenizer, mut info: Info) -> State { match tokenizer.current { Some(b'/') => { tokenizer.consume(); - info.start = tokenizer.point.index; - State::Fn(Box::new(|t| continuation_raw_end_tag(t, info))) + tokenizer.tokenize_state.start = tokenizer.point.index; + State::Fn(Box::new(continuation_raw_end_tag)) } - _ => continuation(tokenizer, info), + _ => continuation(tokenizer), } } @@ -764,35 +764,35 @@ fn continuation_raw_tag_open(tokenizer: &mut Tokenizer, mut info: Info) -> State /// > | /// ^^^^^^ /// ``` -fn continuation_raw_end_tag(tokenizer: &mut Tokenizer, mut info: Info) -> State { match tokenizer.current { Some(b'>') => { // Guaranteed to be valid ASCII bytes. 
let slice = Slice::from_indices( tokenizer.parse_state.bytes, - info.start, + tokenizer.tokenize_state.start, tokenizer.point.index, ); let name = slice.as_str().to_ascii_lowercase(); - info.start = 0; + tokenizer.tokenize_state.start = 0; if HTML_RAW_NAMES.contains(&name.as_str()) { tokenizer.consume(); - State::Fn(Box::new(|t| continuation_close(t, info))) + State::Fn(Box::new(continuation_close)) } else { - continuation(tokenizer, info) + continuation(tokenizer) } } Some(b'A'..=b'Z' | b'a'..=b'z') - if tokenizer.point.index - info.start < HTML_RAW_SIZE_MAX => + if tokenizer.point.index - tokenizer.tokenize_state.start < HTML_RAW_SIZE_MAX => { tokenizer.consume(); - State::Fn(Box::new(|t| continuation_raw_end_tag(t, info))) + State::Fn(Box::new(continuation_raw_end_tag)) } _ => { - info.start = 0; - continuation(tokenizer, info) + tokenizer.tokenize_state.start = 0; + continuation(tokenizer) } } } @@ -803,13 +803,13 @@ fn continuation_raw_end_tag(tokenizer: &mut Tokenizer, mut info: Info) -> State /// > | &<]]> /// ^ /// ``` -fn continuation_character_data_inside(tokenizer: &mut Tokenizer, info: Info) -> State { +fn continuation_character_data_inside(tokenizer: &mut Tokenizer) -> State { match tokenizer.current { Some(b']') => { tokenizer.consume(); - State::Fn(Box::new(|t| continuation_declaration_inside(t, info))) + State::Fn(Box::new(continuation_declaration_inside)) } - _ => continuation(tokenizer, info), + _ => continuation(tokenizer), } } @@ -827,17 +827,17 @@ fn continuation_character_data_inside(tokenizer: &mut Tokenizer, info: Info) -> /// > | &<]]> /// ^ /// ``` -fn continuation_declaration_inside(tokenizer: &mut Tokenizer, info: Info) -> State { +fn continuation_declaration_inside(tokenizer: &mut Tokenizer) -> State { match tokenizer.current { Some(b'>') => { tokenizer.consume(); - State::Fn(Box::new(|t| continuation_close(t, info))) + State::Fn(Box::new(continuation_close)) } - Some(b'-') if info.kind == Kind::Comment => { + Some(b'-') if 
tokenizer.tokenize_state.marker == COMMENT => { tokenizer.consume(); - State::Fn(Box::new(|t| continuation_declaration_inside(t, info))) + State::Fn(Box::new(continuation_declaration_inside)) } - _ => continuation(tokenizer, info), + _ => continuation(tokenizer), } } @@ -847,7 +847,7 @@ fn continuation_declaration_inside(tokenizer: &mut Tokenizer, info: Info) -> Sta /// > | /// ^ /// ``` -fn continuation_close(tokenizer: &mut Tokenizer, info: Info) -> State { +fn continuation_close(tokenizer: &mut Tokenizer) -> State { match tokenizer.current { None | Some(b'\n') => { tokenizer.exit(Token::HtmlFlowData); @@ -855,7 +855,7 @@ fn continuation_close(tokenizer: &mut Tokenizer, info: Info) -> State { } _ => { tokenizer.consume(); - State::Fn(Box::new(|t| continuation_close(t, info))) + State::Fn(Box::new(continuation_close)) } } } @@ -868,6 +868,7 @@ fn continuation_close(tokenizer: &mut Tokenizer, info: Info) -> State { /// ``` fn continuation_after(tokenizer: &mut Tokenizer) -> State { tokenizer.exit(Token::HtmlFlow); + tokenizer.tokenize_state.marker = 0; // Feel free to interrupt. tokenizer.interrupt = false; // No longer concrete. -- cgit