about summary refs log tree commit diff stats
path: root/src/construct/heading_atx.rs
diff options
context:
space:
mode:
author    Titus Wormer <tituswormer@gmail.com>  2022-08-12 17:02:01 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-08-12 17:02:01 +0200
commit    6ba11bdaca1721fb4591819604c340d147798f45 (patch)
tree      ef602b518043c0a7228e76d9d00bee95a17798d4 /src/construct/heading_atx.rs
parent    504729a4a0c8f3e0d8fc9159e0273150b169e184 (diff)
download  markdown-rs-6ba11bdaca1721fb4591819604c340d147798f45.tar.gz
          markdown-rs-6ba11bdaca1721fb4591819604c340d147798f45.tar.bz2
          markdown-rs-6ba11bdaca1721fb4591819604c340d147798f45.zip
Remove `match` statements if clumsy
Diffstat (limited to 'src/construct/heading_atx.rs')
-rw-r--r--  src/construct/heading_atx.rs  24
1 file changed, 13 insertions, 11 deletions
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index 974158f..f75805a 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -107,27 +107,29 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
- match tokenizer.current {
- None | Some(b'\n') if tokenizer.tokenize_state.size > 0 => {
+ if tokenizer.current == Some(b'#')
+ && tokenizer.tokenize_state.size < HEADING_ATX_OPENING_FENCE_SIZE_MAX
+ {
+ tokenizer.tokenize_state.size += 1;
+ tokenizer.consume();
+ State::Next(StateName::HeadingAtxSequenceOpen)
+ } else if tokenizer.tokenize_state.size > 0 {
+ if matches!(tokenizer.current, None | Some(b'\n')) {
tokenizer.tokenize_state.size = 0;
tokenizer.exit(Name::HeadingAtxSequence);
State::Retry(StateName::HeadingAtxAtBreak)
- }
- Some(b'#') if tokenizer.tokenize_state.size < HEADING_ATX_OPENING_FENCE_SIZE_MAX => {
- tokenizer.tokenize_state.size += 1;
- tokenizer.consume();
- State::Next(StateName::HeadingAtxSequenceOpen)
- }
- _ if tokenizer.tokenize_state.size > 0 => {
+ } else if matches!(tokenizer.current, Some(b'\t' | b' ')) {
tokenizer.tokenize_state.size = 0;
tokenizer.exit(Name::HeadingAtxSequence);
tokenizer.attempt(State::Next(StateName::HeadingAtxAtBreak), State::Nok);
State::Retry(space_or_tab(tokenizer))
- }
- _ => {
+ } else {
tokenizer.tokenize_state.size = 0;
State::Nok
}
+ } else {
+ tokenizer.tokenize_state.size = 0;
+ State::Nok
}
}