author    | Titus Wormer <tituswormer@gmail.com> | 2022-08-11 17:26:17 +0200
committer | Titus Wormer <tituswormer@gmail.com> | 2022-08-11 17:26:17 +0200
commit    | 0d9c4611922535533746d1a86f10ef4e701c950e (patch)
tree      | dd776161db75ba264b67830635b736ca5dd5c314 /src/construct/heading_atx.rs
parent    | 90969231bfcdfcd09bae646abba17d832b633376 (diff)
Refactor attempts to remove unneeded state name
Diffstat (limited to 'src/construct/heading_atx.rs')
-rw-r--r-- | src/construct/heading_atx.rs | 14 |
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index 4e656d4..17cf617 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -70,7 +70,8 @@ use crate::tokenizer::Tokenizer;
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     if tokenizer.parse_state.constructs.heading_atx {
         tokenizer.enter(Name::HeadingAtx);
-        let name = space_or_tab_min_max(
+        tokenizer.attempt(State::Next(StateName::HeadingAtxBefore), State::Nok);
+        State::Retry(space_or_tab_min_max(
             tokenizer,
             0,
             if tokenizer.parse_state.constructs.code_indented {
@@ -78,8 +79,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
             } else {
                 usize::MAX
             },
-        );
-        tokenizer.attempt(name, State::Next(StateName::HeadingAtxBefore), State::Nok)
+        ))
     } else {
         State::Nok
     }
@@ -121,8 +121,8 @@ pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
         _ if tokenizer.tokenize_state.size > 0 => {
             tokenizer.tokenize_state.size = 0;
             tokenizer.exit(Name::HeadingAtxSequence);
-            let name = space_or_tab(tokenizer);
-            tokenizer.attempt(name, State::Next(StateName::HeadingAtxAtBreak), State::Nok)
+            tokenizer.attempt(State::Next(StateName::HeadingAtxAtBreak), State::Nok);
+            State::Retry(space_or_tab(tokenizer))
         }
         _ => {
             tokenizer.tokenize_state.size = 0;
@@ -147,8 +147,8 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
             State::Ok
         }
         Some(b'\t' | b' ') => {
-            let name = space_or_tab(tokenizer);
-            tokenizer.attempt(name, State::Next(StateName::HeadingAtxAtBreak), State::Nok)
+            tokenizer.attempt(State::Next(StateName::HeadingAtxAtBreak), State::Nok);
+            State::Retry(space_or_tab(tokenizer))
        }
         Some(b'#') => {
             tokenizer.enter(Name::HeadingAtxSequence);
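Each hunk applies the same refactor: rather than first binding the state name returned by `space_or_tab` / `space_or_tab_min_max` and passing it into `tokenizer.attempt(name, ok, nok)`, the code now registers only the ok/nok outcomes with `tokenizer.attempt(ok, nok)` and then returns `State::Retry(...)` pointing at the helper's state, dropping the intermediate `name` binding. The sketch below illustrates that before/after shape; `Tokenizer`, `State`, `StateName`, and `space_or_tab` here are simplified stand-ins written for this example, not the real markdown-rs API.

```rust
// Toy stand-ins for the real markdown-rs types, only to show the shape of the
// refactor; the actual `Tokenizer`, `State`, and `StateName` are richer.
#[derive(Debug, Clone, Copy, PartialEq)]
enum StateName {
    HeadingAtxBefore,
    SpaceOrTabStart,
}

#[derive(Debug, Clone, Copy, PartialEq)]
#[allow(dead_code)]
enum State {
    Next(StateName),
    Retry(StateName),
    Ok,
    Nok,
}

#[derive(Default)]
struct Tokenizer {
    // Pending attempts: where to go on success / failure.
    attempts: Vec<(State, State)>,
}

impl Tokenizer {
    // New two-argument form: only records the outcomes of the attempt.
    fn attempt(&mut self, ok: State, nok: State) {
        self.attempts.push((ok, nok));
    }
}

// Hypothetical helper standing in for `space_or_tab(_min_max)`: it sets up its
// own states and returns the name of the state to run first.
fn space_or_tab(_tokenizer: &mut Tokenizer) -> StateName {
    StateName::SpaceOrTabStart
}

// Old shape (three-argument attempt, needing a `name` binding):
//
//     let name = space_or_tab(tokenizer);
//     tokenizer.attempt(name, State::Next(StateName::HeadingAtxBefore), State::Nok)
//
// New shape (register outcomes, then retry into the helper's state):
fn start(tokenizer: &mut Tokenizer) -> State {
    tokenizer.attempt(State::Next(StateName::HeadingAtxBefore), State::Nok);
    State::Retry(space_or_tab(tokenizer))
}

fn main() {
    let mut tokenizer = Tokenizer::default();
    let state = start(&mut tokenizer);
    assert_eq!(state, State::Retry(StateName::SpaceOrTabStart));
    println!("{state:?}");
}
```

The appeal of the new shape is that registering the attempt and transitioning into the helper's state become two separate, uniform steps, so call sites no longer need a throwaway local just to feed `attempt`.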