aboutsummaryrefslogtreecommitdiffstats
path: root/src/construct/list.rs
diff options
context:
space:
mode:
authorLibravatar Titus Wormer <tituswormer@gmail.com>2022-08-02 14:27:31 +0200
committerLibravatar Titus Wormer <tituswormer@gmail.com>2022-08-02 14:27:31 +0200
commit8ffed1822bcbc1b6ce6647b840fb03996b0635ea (patch)
treedc01319d360b6e5ec08c893413ff056d3abd43bc /src/construct/list.rs
parent13cf914b5162fb12c842e983538b42d4b60cdcb8 (diff)
downloadmarkdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.tar.gz
markdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.tar.bz2
markdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.zip
Refactor to remove most closures
Diffstat (limited to '')
-rw-r--r--src/construct/list.rs42
1 file changed, 22 insertions, 20 deletions
diff --git a/src/construct/list.rs b/src/construct/list.rs
index d5a9899..0e12b7c 100644
--- a/src/construct/list.rs
+++ b/src/construct/list.rs
@@ -123,7 +123,7 @@ fn before_unordered(tokenizer: &mut Tokenizer) -> State {
fn before_ordered(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::ListItemPrefix);
tokenizer.enter(Token::ListItemValue);
- inside(tokenizer, 0)
+ inside(tokenizer)
}
/// In an ordered list item value.
@@ -132,17 +132,21 @@ fn before_ordered(tokenizer: &mut Tokenizer) -> State {
/// > | 1. a
/// ^
/// ```
-fn inside(tokenizer: &mut Tokenizer, size: usize) -> State {
+fn inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- Some(b'.' | b')') if !tokenizer.interrupt || size < 2 => {
+ Some(b'.' | b')') if !tokenizer.interrupt || tokenizer.tokenize_state.size < 2 => {
tokenizer.exit(Token::ListItemValue);
marker(tokenizer)
}
- Some(b'0'..=b'9') if size + 1 < LIST_ITEM_VALUE_SIZE_MAX => {
+ Some(b'0'..=b'9') if tokenizer.tokenize_state.size + 1 < LIST_ITEM_VALUE_SIZE_MAX => {
+ tokenizer.tokenize_state.size += 1;
tokenizer.consume();
- State::Fn(Box::new(move |t| inside(t, size + 1)))
+ State::Fn(Box::new(inside))
+ }
+ _ => {
+ tokenizer.tokenize_state.size = 0;
+ State::Nok
}
- _ => State::Nok,
}
}
@@ -170,12 +174,9 @@ fn marker(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
fn marker_after(tokenizer: &mut Tokenizer) -> State {
- tokenizer.check(blank_line, move |ok| {
- if ok {
- Box::new(|t| after(t, true))
- } else {
- Box::new(marker_after_not_blank)
- }
+ tokenizer.tokenize_state.size = 1;
+ tokenizer.check(blank_line, |ok| {
+ Box::new(if ok { after } else { marker_after_not_blank })
})(tokenizer)
}
@@ -186,13 +187,11 @@ fn marker_after(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
fn marker_after_not_blank(tokenizer: &mut Tokenizer) -> State {
+ tokenizer.tokenize_state.size = 0;
+
// Attempt to parse up to the largest allowed indent, `nok` if there is more whitespace.
- tokenizer.attempt(whitespace, move |ok| {
- if ok {
- Box::new(|t| after(t, false))
- } else {
- Box::new(prefix_other)
- }
+ tokenizer.attempt(whitespace, |ok| {
+ Box::new(if ok { after } else { prefix_other })
})(tokenizer)
}
@@ -232,7 +231,7 @@ fn prefix_other(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::SpaceOrTab);
tokenizer.consume();
tokenizer.exit(Token::SpaceOrTab);
- State::Fn(Box::new(|t| after(t, false)))
+ State::Fn(Box::new(after))
}
_ => State::Nok,
}
@@ -244,7 +243,10 @@ fn prefix_other(tokenizer: &mut Tokenizer) -> State {
/// > | * a
/// ^
/// ```
-fn after(tokenizer: &mut Tokenizer, blank: bool) -> State {
+fn after(tokenizer: &mut Tokenizer) -> State {
+ let blank = tokenizer.tokenize_state.size == 1;
+ tokenizer.tokenize_state.size = 0;
+
if blank && tokenizer.interrupt {
State::Nok
} else {