author     Titus Wormer <tituswormer@gmail.com>  2022-08-02 14:27:31 +0200
committer  Titus Wormer <tituswormer@gmail.com>  2022-08-02 14:27:31 +0200
commit     8ffed1822bcbc1b6ce6647b840fb03996b0635ea (patch)
tree       dc01319d360b6e5ec08c893413ff056d3abd43bc /src/construct/partial_data.rs
parent     13cf914b5162fb12c842e983538b42d4b60cdcb8 (diff)
download   markdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.tar.gz
           markdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.tar.bz2
           markdown-rs-8ffed1822bcbc1b6ce6647b840fb03996b0635ea.zip
Refactor to remove most closures
Diffstat (limited to 'src/construct/partial_data.rs')
-rw-r--r--  src/construct/partial_data.rs  24
1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index 335d7ab..0365489 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -15,14 +15,14 @@ use crate::tokenizer::{EventType, State, Tokenizer};
 /// > | abc
 ///     ^
 /// ```
-pub fn start(tokenizer: &mut Tokenizer, stop: &'static [u8]) -> State {
+pub fn start(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
-        Some(byte) if stop.contains(&byte) => {
+        Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => {
             tokenizer.enter(Token::Data);
             tokenizer.consume();
-            State::Fn(Box::new(move |t| data(t, stop)))
+            State::Fn(Box::new(data))
         }
-        _ => at_break(tokenizer, stop),
+        _ => at_break(tokenizer),
     }
 }
 
@@ -32,22 +32,22 @@ pub fn start(tokenizer: &mut Tokenizer, stop: &'static [u8]) -> State {
 /// > | abc
 ///     ^
 /// ```
-fn at_break(tokenizer: &mut Tokenizer, stop: &'static [u8]) -> State {
+fn at_break(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None => State::Ok,
         Some(b'\n') => {
             tokenizer.enter(Token::LineEnding);
             tokenizer.consume();
             tokenizer.exit(Token::LineEnding);
-            State::Fn(Box::new(move |t| at_break(t, stop)))
+            State::Fn(Box::new(at_break))
         }
-        Some(byte) if stop.contains(&byte) => {
+        Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => {
             tokenizer.register_resolver_before("data".to_string(), Box::new(resolve_data));
             State::Ok
         }
         _ => {
             tokenizer.enter(Token::Data);
-            data(tokenizer, stop)
+            data(tokenizer)
         }
     }
 }
 
@@ -58,19 +58,19 @@ fn at_break(tokenizer: &mut Tokenizer, stop: &'static [u8]) -> State {
 /// > | abc
 ///     ^^^
 /// ```
-fn data(tokenizer: &mut Tokenizer, stop: &'static [u8]) -> State {
+fn data(tokenizer: &mut Tokenizer) -> State {
     let done = match tokenizer.current {
         None | Some(b'\n') => true,
-        Some(byte) if stop.contains(&byte) => true,
+        Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => true,
        _ => false,
    };
 
    if done {
        tokenizer.exit(Token::Data);
-        at_break(tokenizer, stop)
+        at_break(tokenizer)
    } else {
        tokenizer.consume();
-        State::Fn(Box::new(move |t| data(t, stop)))
+        State::Fn(Box::new(data))
    }
 }
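
The pattern of the refactor: instead of every state function returning a `move` closure that captures `stop`, the byte set is stored on the tokenizer's shared `tokenize_state`, so continuations can be boxed as plain function items with no captures. Below is a minimal, self-contained sketch of that idea; the type definitions and the trampoline in `main` are simplified stand-ins for illustration, not the real markdown-rs API.

// Simplified stand-ins for the real Tokenizer/State types (assumption: names
// only match the diff loosely; the actual crate has richer fields and methods).

/// What a state function returns: finished, or the next function to drive.
enum State {
    Ok,
    Next(fn(&mut Tokenizer) -> State),
}

/// Shared state that previously lived in closure captures.
struct TokenizeState {
    stop: &'static [u8],
}

struct Tokenizer {
    bytes: Vec<u8>,
    index: usize,
    tokenize_state: TokenizeState,
}

impl Tokenizer {
    fn current(&self) -> Option<u8> {
        self.bytes.get(self.index).copied()
    }
    fn consume(&mut self) {
        self.index += 1;
    }
}

/// Before the refactor, `data` took `stop: &'static [u8]` and had to return
/// `State::Fn(Box::new(move |t| data(t, stop)))` to keep `stop` alive.
/// After it, `stop` is read from `tokenizer.tokenize_state`, so the bare
/// function item `data` can be returned as the continuation.
fn data(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current() {
        None => State::Ok,
        Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => State::Ok,
        _ => {
            tokenizer.consume();
            // No capture needed: `data` is a plain function pointer.
            State::Next(data)
        }
    }
}

fn main() {
    let mut tokenizer = Tokenizer {
        bytes: b"abc*".to_vec(),
        index: 0,
        tokenize_state: TokenizeState { stop: b"*" },
    };
    // Trampoline: keep calling the returned state function until `Ok`.
    let mut state = data(&mut tokenizer);
    while let State::Next(next) = state {
        state = next(&mut tokenizer);
    }
    assert_eq!(tokenizer.index, 3); // consumed "abc", stopped at '*'
}

The trade-off shown here matches the commit message: the continuation no longer allocates a capturing closure per step, at the cost of threading `stop` through a mutable field instead of a function argument.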