about summary refs log tree commit diff stats
path: root/src/content/string.rs
diff options
context:
space:
mode:
author: Titus Wormer <tituswormer@gmail.com> 2022-08-09 10:45:15 +0200
committer: Titus Wormer <tituswormer@gmail.com> 2022-08-09 10:45:15 +0200
commit4ce1ac9e41cafa9051377470e8a246063f7d9b1a (patch)
treed678d9583764b2706fe7ea4876e91e40609f15b0 /src/content/string.rs
parent8ffed1822bcbc1b6ce6647b840fb03996b0635ea (diff)
downloadmarkdown-rs-4ce1ac9e41cafa9051377470e8a246063f7d9b1a.tar.gz
markdown-rs-4ce1ac9e41cafa9051377470e8a246063f7d9b1a.tar.bz2
markdown-rs-4ce1ac9e41cafa9051377470e8a246063f7d9b1a.zip
Rewrite algorithm to not pass around boxed functions
* Pass state names from an enum around instead of boxed functions * Refactor to simplify attempts a lot * Use a subtokenizer for the `document` content type
Diffstat (limited to 'src/content/string.rs')
-rw-r--r--src/content/string.rs28
1 file changed, 17 insertions(+), 11 deletions(-)
diff --git a/src/content/string.rs b/src/content/string.rs
index 2e738fb..697ec2c 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -12,11 +12,8 @@
//!
//! [text]: crate::content::text
-use crate::construct::{
- character_escape::start as character_escape, character_reference::start as character_reference,
- partial_data::start as data, partial_whitespace::resolve_whitespace,
-};
-use crate::tokenizer::{State, Tokenizer};
+use crate::construct::partial_whitespace::resolve_whitespace;
+use crate::tokenizer::{State, StateName, Tokenizer};
const MARKERS: [u8; 2] = [b'&', b'\\'];
@@ -28,19 +25,28 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
}
/// Before string.
-fn before(tokenizer: &mut Tokenizer) -> State {
+pub fn before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Ok,
_ => tokenizer.attempt_n(
- vec![Box::new(character_reference), Box::new(character_escape)],
- |ok| Box::new(if ok { before } else { before_data }),
- )(tokenizer),
+ vec![
+ StateName::CharacterReferenceStart,
+ StateName::CharacterEscapeStart,
+ ],
+ |ok| {
+ State::Fn(if ok {
+ StateName::StringBefore
+ } else {
+ StateName::StringBeforeData
+ })
+ },
+ ),
}
}
/// At data.
-fn before_data(tokenizer: &mut Tokenizer) -> State {
- tokenizer.go(data, before)(tokenizer)
+pub fn before_data(tokenizer: &mut Tokenizer) -> State {
+ tokenizer.go(StateName::DataStart, StateName::StringBefore)
}
/// Resolve whitespace.