author Titus Wormer <tituswormer@gmail.com> 2022-08-11 14:53:42 +0200
committer Titus Wormer <tituswormer@gmail.com> 2022-08-11 14:53:42 +0200
commit d87dc75500a2e73d9521135054b42c18e6eae987 (patch)
tree 4bc3f4a72894db3ec5ed4069841c6be5be69713d /src/content/string.rs
parent 6eb2f644057f371841fe25330a57ee185f91c7af (diff)
Refactor to move some code to `event.rs`
Diffstat (limited to 'src/content/string.rs')
src/content/string.rs | 24 ++++++++++++++----------
1 file changed, 14 insertions(+), 10 deletions(-)
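The mechanical change in this diff is an import alias: the state `Name` enum is now brought in from `crate::state` as `StateName`, so call sites read, for example, `StateName::StringBefore` instead of `Name::StringBefore`. A minimal, self-contained sketch of the alias pattern follows; the module and variants are stand-ins for illustration, not the markdown-rs types:

    // Stand-in types; only the `as` alias in the `use` line mirrors the diff.
    mod state {
        #[derive(Debug)]
        pub enum Name {
            StringBefore,
        }
        #[derive(Debug)]
        pub enum State {
            Retry(Name),
        }
    }

    use crate::state::{Name as StateName, State};

    // Call sites then name the enum through the alias.
    fn start() -> State {
        State::Retry(StateName::StringBefore)
    }

    fn main() {
        // Prints: Retry(StringBefore)
        println!("{:?}", start());
    }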
diff --git a/src/content/string.rs b/src/content/string.rs
index 927f582..79dee6c 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -13,7 +13,7 @@
//! [text]: crate::content::text
use crate::construct::partial_whitespace::resolve_whitespace;
-use crate::state::{Name, State};
+use crate::state::{Name as StateName, State};
use crate::tokenizer::Tokenizer;
const MARKERS: [u8; 2] = [b'&', b'\\'];
@@ -22,7 +22,7 @@ const MARKERS: [u8; 2] = [b'&', b'\\'];
pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.register_resolver("whitespace".to_string(), Box::new(resolve));
tokenizer.tokenize_state.markers = &MARKERS;
- State::Retry(Name::StringBefore)
+ State::Retry(StateName::StringBefore)
}
/// Before string.
@@ -30,22 +30,26 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Ok,
Some(b'&') => tokenizer.attempt(
- Name::CharacterReferenceStart,
- State::Next(Name::StringBefore),
- State::Next(Name::StringBeforeData),
+ StateName::CharacterReferenceStart,
+ State::Next(StateName::StringBefore),
+ State::Next(StateName::StringBeforeData),
),
Some(b'\\') => tokenizer.attempt(
- Name::CharacterEscapeStart,
- State::Next(Name::StringBefore),
- State::Next(Name::StringBeforeData),
+ StateName::CharacterEscapeStart,
+ State::Next(StateName::StringBefore),
+ State::Next(StateName::StringBeforeData),
),
- _ => State::Retry(Name::StringBeforeData),
+ _ => State::Retry(StateName::StringBeforeData),
}
}
/// At data.
pub fn before_data(tokenizer: &mut Tokenizer) -> State {
- tokenizer.attempt(Name::DataStart, State::Next(Name::StringBefore), State::Nok)
+ tokenizer.attempt(
+ StateName::DataStart,
+ State::Next(StateName::StringBefore),
+ State::Nok,
+ )
}
/// Resolve whitespace.
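For orientation when reading the hunks above: each `attempt` call pairs a construct to try (`CharacterReferenceStart`, `CharacterEscapeStart`, `DataStart`) with a state to continue in when it matches and a state otherwise, where `State::Next(StateName::StringBeforeData)` falls back to plain data and `State::Nok` gives up. That reading is inferred from the call sites in this file rather than from the tokenizer itself; a self-contained mock of the ok/nok dispatch, with stand-in names and none of the real API:

    // Hypothetical mock, not the markdown-rs tokenizer.
    #[derive(Debug)]
    enum StateName {
        StringBefore,
        StringBeforeData,
    }

    #[derive(Debug)]
    enum State {
        Nok,
        Next(StateName),
    }

    // Pick the follow-up state depending on whether the construct matched.
    fn attempt(matched: bool, ok: State, nok: State) -> State {
        if matched { ok } else { nok }
    }

    fn main() {
        // A `&` that is not a valid character reference falls back to data.
        let s = attempt(
            false,
            State::Next(StateName::StringBefore),
            State::Next(StateName::StringBeforeData),
        );
        // Prints: Next(StringBeforeData)
        println!("{s:?}");

        // `before_data` has no fallback: if data does not start, string fails.
        let t = attempt(false, State::Next(StateName::StringBefore), State::Nok);
        // Prints: Nok
        println!("{t:?}");
    }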