diff options
author | Titus Wormer <tituswormer@gmail.com> | 2022-07-29 18:22:59 +0200 |
---|---|---|
committer | Titus Wormer <tituswormer@gmail.com> | 2022-07-29 18:22:59 +0200 |
commit | 0eeff9148e327183e532752f46421a75506dd7a6 (patch) | |
tree | 4f0aed04f90aa759ce96a2e87aa719e7fa95c450 /src/construct/heading_atx.rs | |
parent | 148ede7f0f42f0ccb1620b13d91f35d0c7d04c2f (diff) | |
download | markdown-rs-0eeff9148e327183e532752f46421a75506dd7a6.tar.gz markdown-rs-0eeff9148e327183e532752f46421a75506dd7a6.tar.bz2 markdown-rs-0eeff9148e327183e532752f46421a75506dd7a6.zip |
Refactor to improve states
* Remove custom kind wrappers, use plain bytes instead
* Remove `Into`s, use the explicit expected types instead
* Refactor to use `slice.as_str` in most places
* Remove unneeded unique check before adding a definition
* Use a shared CDATA prefix in constants
* Inline byte checks into matches
* Pass bytes back from parser instead of whole parse state
* Refactor to work more often on bytes
* Rename custom `size` to `len`
Diffstat (limited to '')
-rw-r--r-- | src/construct/heading_atx.rs | 28 |
1 file changed, 16 insertions, 12 deletions
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs index 9a73b77..aa388ee 100644 --- a/src/construct/heading_atx.rs +++ b/src/construct/heading_atx.rs @@ -66,15 +66,19 @@ use crate::tokenizer::{ContentType, Event, EventType, State, Tokenizer}; /// ^ /// ``` pub fn start(tokenizer: &mut Tokenizer) -> State { - let max = if tokenizer.parse_state.constructs.code_indented { - TAB_SIZE - 1 - } else { - usize::MAX - }; - if tokenizer.parse_state.constructs.heading_atx { tokenizer.enter(Token::HeadingAtx); - tokenizer.go(space_or_tab_min_max(0, max), before)(tokenizer) + tokenizer.go( + space_or_tab_min_max( + 0, + if tokenizer.parse_state.constructs.code_indented { + TAB_SIZE - 1 + } else { + usize::MAX + }, + ), + before, + )(tokenizer) } else { State::Nok } @@ -101,19 +105,19 @@ fn before(tokenizer: &mut Tokenizer) -> State { /// > | ## aa /// ^ /// ``` -fn sequence_open(tokenizer: &mut Tokenizer, rank: usize) -> State { +fn sequence_open(tokenizer: &mut Tokenizer, size: usize) -> State { match tokenizer.current { - None | Some(b'\n') if rank > 0 => { + None | Some(b'\n') if size > 0 => { tokenizer.exit(Token::HeadingAtxSequence); at_break(tokenizer) } - Some(b'#') if rank < HEADING_ATX_OPENING_FENCE_SIZE_MAX => { + Some(b'#') if size < HEADING_ATX_OPENING_FENCE_SIZE_MAX => { tokenizer.consume(); State::Fn(Box::new(move |tokenizer| { - sequence_open(tokenizer, rank + 1) + sequence_open(tokenizer, size + 1) })) } - _ if rank > 0 => { + _ if size > 0 => { tokenizer.exit(Token::HeadingAtxSequence); tokenizer.go(space_or_tab(), at_break)(tokenizer) } |