author     Titus Wormer <tituswormer@gmail.com>  2022-07-29 18:22:59 +0200
committer  Titus Wormer <tituswormer@gmail.com>  2022-07-29 18:22:59 +0200
commit     0eeff9148e327183e532752f46421a75506dd7a6 (patch)
tree       4f0aed04f90aa759ce96a2e87aa719e7fa95c450 /src/construct/definition.rs
parent     148ede7f0f42f0ccb1620b13d91f35d0c7d04c2f (diff)
Refactor to improve states
* Remove custom kind wrappers, use plain bytes instead (see the sketch after this list)
* Remove `Into`s, use the explicit expected types instead
* Refactor to use `slice.as_str` in most places
* Remove unneeded unique check before adding a definition
* Use a shared CDATA prefix in constants
* Inline byte checks into matches
* Pass bytes back from parser instead of whole parse state
* Refactor to work more often on bytes
* Rename custom `size` to `len`
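
As an illustration of the first bullet, here is a hypothetical before/after of what replacing a custom kind wrapper with a plain marker byte can look like. `TitleKind`, `is_closer_with_kind`, and `is_closer_with_byte` are invented names for this sketch, not code from this repository.

// Hypothetical before/after; not code from markdown-rs.

// Before: a dedicated "kind" enum wraps the marker character.
#[derive(Clone, Copy)]
enum TitleKind {
    DoubleQuote, // `"`
    SingleQuote, // `'`
    Paren,       // opened by `(`, closed by `)`
}

fn is_closer_with_kind(kind: TitleKind, byte: u8) -> bool {
    match kind {
        TitleKind::DoubleQuote => byte == b'"',
        TitleKind::SingleQuote => byte == b'\'',
        TitleKind::Paren => byte == b')',
    }
}

// After: store the plain closing byte and compare it directly.
fn is_closer_with_byte(marker: u8, byte: u8) -> bool {
    byte == marker
}

fn main() {
    assert!(is_closer_with_kind(TitleKind::DoubleQuote, b'"'));
    assert!(is_closer_with_kind(TitleKind::SingleQuote, b'\''));
    assert!(is_closer_with_kind(TitleKind::Paren, b')'));
    assert!(is_closer_with_byte(b')', b')'));
}

The byte form drops a layer of mapping between marker characters and enum variants, which lines up with the other bullets about working on bytes directly.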
Diffstat (limited to 'src/construct/definition.rs')
-rw-r--r--  src/construct/definition.rs  21
1 file changed, 11 insertions, 10 deletions
diff --git a/src/construct/definition.rs b/src/construct/definition.rs
index 14755c9..bd7df82 100644
--- a/src/construct/definition.rs
+++ b/src/construct/definition.rs
@@ -110,17 +110,18 @@ use crate::util::skip::opt_back as skip_opt_back;
 /// ^
 /// ```
 pub fn start(tokenizer: &mut Tokenizer) -> State {
-    let definition_before = !tokenizer.events.is_empty()
-        && tokenizer.events[skip_opt_back(
-            &tokenizer.events,
-            tokenizer.events.len() - 1,
-            &[Token::LineEnding, Token::SpaceOrTab],
-        )]
-        .token_type
-            == Token::Definition;
-    // Do not interrupt paragraphs (but do follow definitions).
-    if (!tokenizer.interrupt || definition_before) && tokenizer.parse_state.constructs.definition {
+    let possible = !tokenizer.interrupt
+        || (!tokenizer.events.is_empty()
+            && tokenizer.events[skip_opt_back(
+                &tokenizer.events,
+                tokenizer.events.len() - 1,
+                &[Token::LineEnding, Token::SpaceOrTab],
+            )]
+            .token_type
+                == Token::Definition);
+
+    if possible && tokenizer.parse_state.constructs.definition {
         tokenizer.enter(Token::Definition);
         // Note: arbitrary whitespace allowed even if code (indented) is on.
         tokenizer.attempt_opt(space_or_tab(), before)(tokenizer)
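
In plainer terms, the change folds the old `definition_before` check into a single `possible` condition: a definition may start either when the tokenizer is not interrupting another construct, or when the most recent event (skipping trailing whitespace and line endings) is itself a definition. The sketch below restates that shape with simplified stand-in types; `Kind`, `last_is_definition`, and `definition_possible` are hypothetical names, not the real markdown-rs `Tokenizer`/`Token` API.

// Simplified stand-ins for illustration only; not the markdown-rs types.
#[derive(PartialEq)]
enum Kind {
    Definition,
    LineEnding,
    SpaceOrTab,
    Paragraph,
}

// Skip trailing whitespace-like events and check whether the last
// remaining event is a definition (mirrors the `skip_opt_back` call).
fn last_is_definition(events: &[Kind]) -> bool {
    events
        .iter()
        .rev()
        .find(|kind| !matches!(kind, Kind::LineEnding | Kind::SpaceOrTab))
        == Some(&Kind::Definition)
}

// The shape of the new check: do not interrupt paragraphs,
// but do allow a definition directly after another definition.
fn definition_possible(interrupt: bool, events: &[Kind]) -> bool {
    !interrupt || last_is_definition(events)
}

fn main() {
    assert!(definition_possible(false, &[Kind::Paragraph]));
    assert!(definition_possible(
        true,
        &[Kind::Definition, Kind::SpaceOrTab, Kind::LineEnding]
    ));
    assert!(!definition_possible(true, &[Kind::Paragraph]));
}

The boolean logic appears unchanged from the removed lines; the refactor only restructures it into one condition and drops the separate comment and intermediate variable.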