diff options
author | Titus Wormer <tituswormer@gmail.com> | 2022-08-10 10:54:43 +0200 |
---|---|---|
committer | Titus Wormer <tituswormer@gmail.com> | 2022-08-10 10:54:43 +0200 |
commit | 3a90a49518bbc53876d3f46d8763b2fe0f03d789 (patch) | |
tree | eecaaf9586cf8632b3b6fe22794dae1f492849f5 /src/construct/heading_atx.rs | |
parent | 8162222295d71ea7fd9270c7b3b9497b91db3f1f (diff) | |
download | markdown-rs-3a90a49518bbc53876d3f46d8763b2fe0f03d789.tar.gz markdown-rs-3a90a49518bbc53876d3f46d8763b2fe0f03d789.tar.bz2 markdown-rs-3a90a49518bbc53876d3f46d8763b2fe0f03d789.zip |
Add `State::Retry`
Diffstat (limited to 'src/construct/heading_atx.rs')
-rw-r--r-- | src/construct/heading_atx.rs | 12 |
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs index 41fad49..d70f7db 100644 --- a/src/construct/heading_atx.rs +++ b/src/construct/heading_atx.rs @@ -92,7 +92,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State { pub fn before(tokenizer: &mut Tokenizer) -> State { if Some(b'#') == tokenizer.current { tokenizer.enter(Token::HeadingAtxSequence); - sequence_open(tokenizer) + State::Retry(StateName::HeadingAtxSequenceOpen) } else { State::Nok } @@ -109,7 +109,7 @@ pub fn sequence_open(tokenizer: &mut Tokenizer) -> State { None | Some(b'\n') if tokenizer.tokenize_state.size > 0 => { tokenizer.tokenize_state.size = 0; tokenizer.exit(Token::HeadingAtxSequence); - at_break(tokenizer) + State::Retry(StateName::HeadingAtxAtBreak) } Some(b'#') if tokenizer.tokenize_state.size < HEADING_ATX_OPENING_FENCE_SIZE_MAX => { tokenizer.tokenize_state.size += 1; @@ -150,11 +150,11 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State { } Some(b'#') => { tokenizer.enter(Token::HeadingAtxSequence); - sequence_further(tokenizer) + State::Retry(StateName::HeadingAtxSequenceFurther) } Some(_) => { tokenizer.enter_with_content(Token::Data, Some(ContentType::Text)); - data(tokenizer) + State::Retry(StateName::HeadingAtxData) } } } @@ -173,7 +173,7 @@ pub fn sequence_further(tokenizer: &mut Tokenizer) -> State { State::Next(StateName::HeadingAtxSequenceFurther) } else { tokenizer.exit(Token::HeadingAtxSequence); - at_break(tokenizer) + State::Retry(StateName::HeadingAtxAtBreak) } } @@ -188,7 +188,7 @@ pub fn data(tokenizer: &mut Tokenizer) -> State { // Note: `#` for closing sequence must be preceded by whitespace, otherwise it’s just text. None | Some(b'\t' | b'\n' | b' ') => { tokenizer.exit(Token::Data); - at_break(tokenizer) + State::Retry(StateName::HeadingAtxAtBreak) } _ => { tokenizer.consume(); |