about summary refs log tree commit diff stats
path: root/src/construct/heading_atx.rs
diff options
context:
space:
mode:
author    Titus Wormer <tituswormer@gmail.com>  2022-08-02 14:27:31 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-08-02 14:27:31 +0200
commit   8ffed1822bcbc1b6ce6647b840fb03996b0635ea (patch)
tree     dc01319d360b6e5ec08c893413ff056d3abd43bc /src/construct/heading_atx.rs
parent   13cf914b5162fb12c842e983538b42d4b60cdcb8 (diff)
download markdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.tar.gz
markdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.tar.bz2
markdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.zip
Refactor to remove most closures
Diffstat (limited to '')
-rw-r--r--  src/construct/heading_atx.rs  22
1 file changed, 13 insertions(+), 9 deletions(-)
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index d432b6c..6751567 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -93,7 +93,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
fn before(tokenizer: &mut Tokenizer) -> State {
if Some(b'#') == tokenizer.current {
tokenizer.enter(Token::HeadingAtxSequence);
- sequence_open(tokenizer, 0)
+ sequence_open(tokenizer)
} else {
State::Nok
}
@@ -105,23 +105,27 @@ fn before(tokenizer: &mut Tokenizer) -> State {
/// > | ## aa
/// ^
/// ```
-fn sequence_open(tokenizer: &mut Tokenizer, size: usize) -> State {
+fn sequence_open(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- None | Some(b'\n') if size > 0 => {
+ None | Some(b'\n') if tokenizer.tokenize_state.size > 0 => {
+ tokenizer.tokenize_state.size = 0;
tokenizer.exit(Token::HeadingAtxSequence);
at_break(tokenizer)
}
- Some(b'#') if size < HEADING_ATX_OPENING_FENCE_SIZE_MAX => {
+ Some(b'#') if tokenizer.tokenize_state.size < HEADING_ATX_OPENING_FENCE_SIZE_MAX => {
+ tokenizer.tokenize_state.size += 1;
tokenizer.consume();
- State::Fn(Box::new(move |tokenizer| {
- sequence_open(tokenizer, size + 1)
- }))
+ State::Fn(Box::new(sequence_open))
}
- _ if size > 0 => {
+ _ if tokenizer.tokenize_state.size > 0 => {
+ tokenizer.tokenize_state.size = 0;
tokenizer.exit(Token::HeadingAtxSequence);
tokenizer.go(space_or_tab(), at_break)(tokenizer)
}
- _ => State::Nok,
+ _ => {
+ tokenizer.tokenize_state.size = 0;
+ State::Nok
+ }
}
}