From f7e5fb852dc9c416b9eeb1f0d4f2d51ba5b68456 Mon Sep 17 00:00:00 2001
From: Titus Wormer
Date: Thu, 28 Jul 2022 16:48:00 +0200
Subject: Refactor to work on `char`s
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Previously, a custom char implementation was used.
That was easier to work with, as sometimes “virtual” characters are
injected or characters are ignored.
This replaces it with actual `char`s, in the hope of eventually
working on `u8`s.

This simplifies the state machine somewhat: only `\n` is fed,
regardless of whether the input contained a CRLF, CR, or LF.
It also feeds `' '` instead of virtual spaces.

The BOM, if present, is now available as a `ByteOrderMark` event.
---
 src/construct/partial_data.rs | 31 ++++++++++++++++---------------
 1 file changed, 16 insertions(+), 15 deletions(-)

diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index 4216276..0b66b09 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -7,7 +7,7 @@
 //! [text]: crate::content::text
 
 use crate::token::Token;
-use crate::tokenizer::{Code, EventType, State, Tokenizer};
+use crate::tokenizer::{EventType, State, Tokenizer};
 
 /// At the beginning of data.
 ///
@@ -15,13 +15,14 @@ use crate::tokenizer::{Code, EventType, State, Tokenizer};
 /// > | abc
 ///     ^
 /// ```
-pub fn start(tokenizer: &mut Tokenizer, stop: &'static [Code]) -> State {
-    if stop.contains(&tokenizer.current) {
-        tokenizer.enter(Token::Data);
-        tokenizer.consume();
-        State::Fn(Box::new(move |t| data(t, stop)))
-    } else {
-        at_break(tokenizer, stop)
+pub fn start(tokenizer: &mut Tokenizer, stop: &'static [char]) -> State {
+    match tokenizer.current {
+        Some(char) if stop.contains(&char) => {
+            tokenizer.enter(Token::Data);
+            tokenizer.consume();
+            State::Fn(Box::new(move |t| data(t, stop)))
+        }
+        _ => at_break(tokenizer, stop),
     }
 }
 
@@ -31,16 +32,16 @@ pub fn start(tokenizer: &mut Tokenizer, stop: &'static [Code]) -> State {
 /// > | abc
 ///     ^
 /// ```
-fn at_break(tokenizer: &mut Tokenizer, stop: &'static [Code]) -> State {
+fn at_break(tokenizer: &mut Tokenizer, stop: &'static [char]) -> State {
     match tokenizer.current {
-        Code::None => State::Ok,
-        Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
+        None => State::Ok,
+        Some('\n') => {
             tokenizer.enter(Token::LineEnding);
             tokenizer.consume();
             tokenizer.exit(Token::LineEnding);
             State::Fn(Box::new(move |t| at_break(t, stop)))
         }
-        _ if stop.contains(&tokenizer.current) => {
+        Some(char) if stop.contains(&char) => {
             tokenizer.register_resolver_before("data".to_string(), Box::new(resolve_data));
             State::Ok
         }
@@ -57,10 +58,10 @@ fn at_break(tokenizer: &mut Tokenizer, stop: &'static [Code]) -> State {
 /// > | abc
 ///     ^^^
 /// ```
-fn data(tokenizer: &mut Tokenizer, stop: &'static [Code]) -> State {
+fn data(tokenizer: &mut Tokenizer, stop: &'static [char]) -> State {
     let done = match tokenizer.current {
-        Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => true,
-        _ if stop.contains(&tokenizer.current) => true,
+        None | Some('\n') => true,
+        Some(char) if stop.contains(&char) => true,
         _ => false,
     };
 
--
cgit
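
To illustrate the line-ending handling described in the commit message, here is a
minimal, self-contained sketch, not code from this crate: `normalize_line_endings`
is a hypothetical helper showing how CRLF, CR, and LF can all be collapsed so that
a state machine only ever receives `'\n'`.

/// Hypothetical helper (not part of the crate): yields the characters of
/// `input`, collapsing a CRLF pair or a lone CR into a single `'\n'`, so
/// downstream states only need a `Some('\n')` arm.
fn normalize_line_endings(input: &str) -> impl Iterator<Item = char> + '_ {
    let mut chars = input.chars().peekable();
    std::iter::from_fn(move || match chars.next() {
        Some('\r') => {
            // Swallow the LF of a CRLF pair, then emit a single LF.
            if chars.peek() == Some(&'\n') {
                chars.next();
            }
            Some('\n')
        }
        other => other,
    })
}

fn main() {
    // All three line-ending styles come out as plain `'\n'`.
    let normalized: String = normalize_line_endings("a\r\nb\rc\nd").collect();
    assert_eq!(normalized, "a\nb\nc\nd");
}

With input normalized along these lines, the `at_break` and `data` states in the
patch above can match a single `Some('\n')` arm instead of distinguishing CRLF,
CR, and LF.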