path: root/src/construct/partial_data.rs
author    Titus Wormer <tituswormer@gmail.com>    2022-08-09 10:45:15 +0200
committer Titus Wormer <tituswormer@gmail.com>    2022-08-09 10:45:15 +0200
commit 4ce1ac9e41cafa9051377470e8a246063f7d9b1a (patch)
tree   d678d9583764b2706fe7ea4876e91e40609f15b0 /src/construct/partial_data.rs
parent 8ffed1822bcbc1b6ce6647b840fb03996b0635ea (diff)
Rewrite algorithm to not pass around boxed functions
* Pass state names from an enum around instead of boxed functions
* Refactor to simplify attempts a lot
* Use a subtokenizer for the `document` content type
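A minimal sketch of the shape of this change, with stand-in types (`OldState`, `NewState`, and the bare `Tokenizer` here are illustrations for contrast, not the crate's exact definitions):

// Hedged sketch of the before/after shape; the `FnOnce` signature
// and `Tokenizer` are assumptions, not markdown-rs's exact types.
struct Tokenizer;

// Before: each transition boxes a closure on the heap.
enum OldState {
    Ok,
    Nok,
    Fn(Box<dyn FnOnce(&mut Tokenizer) -> OldState>),
}

// After: a transition is just a name, a small `Copy` value that a
// central `match` later resolves back to a function.
#[derive(Clone, Copy, Debug, PartialEq)]
enum StateName {
    DataAtBreak,
    DataInside,
}

enum NewState {
    Ok,
    Nok,
    Fn(StateName),
}

Since a `StateName` is `Copy` and comparable, attempts can record "on ok go here, on nok go there" as plain values instead of closures, which is presumably what makes the simplified attempt handling possible.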
Diffstat (limited to 'src/construct/partial_data.rs')
-rw-r--r--  src/construct/partial_data.rs  15
1 file changed, 8 insertions, 7 deletions
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index 0365489..1cb5e61 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -7,7 +7,7 @@
 //! [text]: crate::content::text
 
 use crate::token::Token;
-use crate::tokenizer::{EventType, State, Tokenizer};
+use crate::tokenizer::{EventType, State, StateName, Tokenizer};
 
 /// At the beginning of data.
 ///
@@ -17,10 +17,11 @@ use crate::tokenizer::{EventType, State, Tokenizer};
 /// ```
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
+        // Make sure to eat the first `stop`.
         Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => {
             tokenizer.enter(Token::Data);
             tokenizer.consume();
-            State::Fn(Box::new(data))
+            State::Fn(StateName::DataInside)
         }
         _ => at_break(tokenizer),
     }
@@ -32,14 +33,14 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 /// > | abc
 ///     ^
 /// ```
-fn at_break(tokenizer: &mut Tokenizer) -> State {
+pub fn at_break(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None => State::Ok,
         Some(b'\n') => {
             tokenizer.enter(Token::LineEnding);
             tokenizer.consume();
             tokenizer.exit(Token::LineEnding);
-            State::Fn(Box::new(at_break))
+            State::Fn(StateName::DataAtBreak)
         }
         Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => {
             tokenizer.register_resolver_before("data".to_string(), Box::new(resolve_data));
@@ -47,7 +48,7 @@ fn at_break(tokenizer: &mut Tokenizer) -> State {
         }
         _ => {
             tokenizer.enter(Token::Data);
-            data(tokenizer)
+            inside(tokenizer)
         }
     }
 }
@@ -58,7 +59,7 @@ fn at_break(tokenizer: &mut Tokenizer) -> State {
 /// > | abc
 ///     ^^^
 /// ```
-fn data(tokenizer: &mut Tokenizer) -> State {
+pub fn inside(tokenizer: &mut Tokenizer) -> State {
    let done = match tokenizer.current {
         None | Some(b'\n') => true,
         Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => true,
@@ -70,7 +71,7 @@ fn data(tokenizer: &mut Tokenizer) -> State {
         at_break(tokenizer)
     } else {
         tokenizer.consume();
-        State::Fn(Box::new(data))
+        State::Fn(StateName::DataInside)
     }
 }
 
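Taken together, `start`, `at_break`, and `inside` now form a table-driven machine: each state returns the name of the next state, and a dispatcher in the tokenizer maps names back to functions. Below is a minimal, self-contained sketch of that trampoline pattern, assuming a much-reduced `StateName` and `Tokenizer` (the crate's real enum spans every construct, and its real states also handle stop bytes, events, and resolver registration):

// Hypothetical, heavily trimmed trampoline; not markdown-rs's code.
#[derive(Clone, Copy)]
enum StateName {
    DataAtBreak,
    DataInside,
}

enum State {
    Ok,
    Fn(StateName),
}

struct Tokenizer {
    bytes: Vec<u8>,
    index: usize,
}

impl Tokenizer {
    fn current(&self) -> Option<u8> {
        self.bytes.get(self.index).copied()
    }

    fn consume(&mut self) {
        self.index += 1;
    }
}

// At a break: stop at the end of input, eat line endings, or hand
// off to `data_inside` for ordinary bytes.
fn data_at_break(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current() {
        None => State::Ok,
        Some(b'\n') => {
            tokenizer.consume();
            State::Fn(StateName::DataAtBreak)
        }
        Some(_) => State::Fn(StateName::DataInside),
    }
}

// Inside data: consume until a line ending or the end of input.
fn data_inside(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current() {
        None | Some(b'\n') => data_at_break(tokenizer),
        Some(_) => {
            tokenizer.consume();
            State::Fn(StateName::DataInside)
        }
    }
}

// The dispatcher: one `match` on the name replaces calling through
// a `Box<dyn FnOnce(...)>`.
fn call(tokenizer: &mut Tokenizer, name: StateName) -> State {
    match name {
        StateName::DataAtBreak => data_at_break(tokenizer),
        StateName::DataInside => data_inside(tokenizer),
    }
}

fn main() {
    let mut tokenizer = Tokenizer { bytes: b"abc\ndef".to_vec(), index: 0 };
    let mut state = State::Fn(StateName::DataAtBreak);
    // Drive the machine until a state settles on `Ok`.
    while let State::Fn(name) = state {
        state = call(&mut tokenizer, name);
    }
}

With b"abc\ndef" as input, this consumes ordinary bytes in `data_inside`, eats the line ending in `data_at_break`, and settles on `State::Ok` at the end of input; every transition goes through `call` rather than through a boxed closure.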