author | Titus Wormer <tituswormer@gmail.com> | 2022-08-09 10:45:15 +0200
committer | Titus Wormer <tituswormer@gmail.com> | 2022-08-09 10:45:15 +0200
commit | 4ce1ac9e41cafa9051377470e8a246063f7d9b1a (patch)
tree | d678d9583764b2706fe7ea4876e91e40609f15b0 /src/construct/block_quote.rs
parent | 8ffed1822bcbc1b6ce6647b840fb03996b0635ea (diff)
Rewrite algorithm to not pass around boxed functions
* Pass state names from an enum around instead of boxed functions (see the sketch below)
* Refactor to simplify attempts a lot
* Use a subtokenizer for the `document` content type
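The first bullet is the heart of the change: instead of returning a heap-allocated `Box<dyn Fn…>` pointing at the next state function, a state function returns a name from an enum, and one central dispatcher maps that name back to the function when the state machine is driven. The following is a minimal, self-contained sketch of that pattern; every definition here (`State`, `StateName`, `Tokenizer`, `call`, the two state functions) is a simplified stand-in for illustration, not the actual markdown-rs code.

```rust
/// What a state function asks the state machine to do next.
enum State {
    /// Continue in the state identified by this name.
    Next(StateName),
    /// Done, matched.
    Ok,
    /// Done, did not match.
    Nok,
}

/// Names of all state functions. `Copy`, so passing the next state around
/// costs nothing, unlike boxing a closure for every transition.
#[derive(Clone, Copy)]
enum StateName {
    BlockQuoteStart,
    BlockQuoteBefore,
}

/// Stand-in tokenizer: just a byte cursor.
struct Tokenizer<'a> {
    bytes: &'a [u8],
    index: usize,
}

impl<'a> Tokenizer<'a> {
    fn current(&self) -> Option<u8> {
        self.bytes.get(self.index).copied()
    }
}

/// The central dispatcher: one `match` from name to function.
fn call(name: StateName, tokenizer: &mut Tokenizer) -> State {
    match name {
        StateName::BlockQuoteStart => block_quote_start(tokenizer),
        StateName::BlockQuoteBefore => block_quote_before(tokenizer),
    }
}

/// Start state: defer to `before` by *name*; no closure is boxed.
fn block_quote_start(_tokenizer: &mut Tokenizer) -> State {
    State::Next(StateName::BlockQuoteBefore)
}

/// `before` state: succeed on `>`, fail otherwise.
fn block_quote_before(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current() {
        Some(b'>') => State::Ok,
        _ => State::Nok,
    }
}

fn main() {
    let mut tokenizer = Tokenizer { bytes: b"> a", index: 0 };
    // Drive the machine until a terminal state is reached.
    let mut state = State::Next(StateName::BlockQuoteStart);
    while let State::Next(name) = state {
        state = call(name, &mut tokenizer);
    }
    assert!(matches!(state, State::Ok));
}
```

Because a state name is a small `Copy` value, storing or passing the pending state no longer allocates, and the tokenizer can hold names in plain data structures rather than trait objects.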
Diffstat (limited to 'src/construct/block_quote.rs')
-rw-r--r-- | src/construct/block_quote.rs | 52 |
1 file changed, 29 insertions, 23 deletions
```diff
diff --git a/src/construct/block_quote.rs b/src/construct/block_quote.rs
index 7e4753d..df58d62 100644
--- a/src/construct/block_quote.rs
+++ b/src/construct/block_quote.rs
@@ -36,7 +36,7 @@
 use crate::constant::TAB_SIZE;
 use crate::construct::partial_space_or_tab::space_or_tab_min_max;
 use crate::token::Token;
-use crate::tokenizer::{State, Tokenizer};
+use crate::tokenizer::{State, StateName, Tokenizer};
 
 /// Start of block quote.
 ///
@@ -45,13 +45,17 @@ use crate::tokenizer::{State, Tokenizer};
 ///     ^
 /// ```
 pub fn start(tokenizer: &mut Tokenizer) -> State {
-    let max = if tokenizer.parse_state.constructs.code_indented {
-        TAB_SIZE - 1
-    } else {
-        usize::MAX
-    };
     if tokenizer.parse_state.constructs.block_quote {
-        tokenizer.go(space_or_tab_min_max(0, max), before)(tokenizer)
+        let state_name = space_or_tab_min_max(
+            tokenizer,
+            0,
+            if tokenizer.parse_state.constructs.code_indented {
+                TAB_SIZE - 1
+            } else {
+                usize::MAX
+            },
+        );
+        tokenizer.go(state_name, StateName::BlockQuoteBefore)
     } else {
         State::Nok
     }
@@ -63,7 +67,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 /// > | > a
 ///     ^
 /// ```
-fn before(tokenizer: &mut Tokenizer) -> State {
+pub fn before(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         Some(b'>') => {
             tokenizer.enter(Token::BlockQuote);
@@ -80,13 +84,17 @@
 /// > | > b
 ///     ^
 /// ```
-pub fn cont(tokenizer: &mut Tokenizer) -> State {
-    let max = if tokenizer.parse_state.constructs.code_indented {
-        TAB_SIZE - 1
-    } else {
-        usize::MAX
-    };
-    tokenizer.go(space_or_tab_min_max(0, max), cont_before)(tokenizer)
+pub fn cont_start(tokenizer: &mut Tokenizer) -> State {
+    let state_name = space_or_tab_min_max(
+        tokenizer,
+        0,
+        if tokenizer.parse_state.constructs.code_indented {
+            TAB_SIZE - 1
+        } else {
+            usize::MAX
+        },
+    );
+    tokenizer.go(state_name, StateName::BlockQuoteContBefore)
 }
 
 /// After whitespace, before `>`.
 ///
@@ -96,14 +104,14 @@ pub fn cont(tokenizer: &mut Tokenizer) -> State {
 /// > | > b
 ///     ^
 /// ```
-fn cont_before(tokenizer: &mut Tokenizer) -> State {
+pub fn cont_before(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         Some(b'>') => {
             tokenizer.enter(Token::BlockQuotePrefix);
             tokenizer.enter(Token::BlockQuoteMarker);
             tokenizer.consume();
             tokenizer.exit(Token::BlockQuoteMarker);
-            State::Fn(Box::new(cont_after))
+            State::Fn(StateName::BlockQuoteContAfter)
         }
         _ => State::Nok,
     }
@@ -117,15 +125,13 @@
 /// > | >b
 ///     ^
 /// ```
-fn cont_after(tokenizer: &mut Tokenizer) -> State {
+pub fn cont_after(tokenizer: &mut Tokenizer) -> State {
     if let Some(b'\t' | b' ') = tokenizer.current {
         tokenizer.enter(Token::SpaceOrTab);
         tokenizer.consume();
         tokenizer.exit(Token::SpaceOrTab);
-        tokenizer.exit(Token::BlockQuotePrefix);
-        State::Ok
-    } else {
-        tokenizer.exit(Token::BlockQuotePrefix);
-        State::Ok
     }
+
+    tokenizer.exit(Token::BlockQuotePrefix);
+    State::Ok
 }
```
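For contrast with the enum-based sketch above, here is roughly the shape of the transition value this diff removes. The simplified `State` and `Tokenizer` below are assumptions for illustration, keyed to the `State::Fn(Box::new(cont_after))` line deleted above; only the names taken from the diff are real.

```rust
/// Placeholder for the real tokenizer.
struct Tokenizer;

#[allow(dead_code)]
enum State {
    /// Old style: every pending transition boxed a function trait object.
    Fn(Box<dyn FnOnce(&mut Tokenizer) -> State>),
    Ok,
    Nok,
}

fn cont_after(_tokenizer: &mut Tokenizer) -> State {
    State::Ok
}

fn main() {
    // Before: heap-allocate the next state function itself.
    let _old = State::Fn(Box::new(cont_after));
    // After this commit the same transition is written as
    // `State::Fn(StateName::BlockQuoteContAfter)`: a small Copy value that
    // the tokenizer later resolves back to a function. That central lookup
    // lives outside this module, which is presumably why `before`,
    // `cont_before`, and `cont_after` become `pub` in the diff.
}
```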