author    Titus Wormer <tituswormer@gmail.com>  2022-08-11 13:31:20 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-08-11 13:31:20 +0200
commit    2d35cbfceace81a217cd0fbdae7a8777c7a6465e (patch)
tree      e5e69d44c5c00d1dc70f4e3a227f67fd5c771389 /src/construct/partial_data.rs
parent    053a2603e4bd5ec9caf40617b52136e5ef3fcf0a (diff)
Refactor internal docs, code style of tokenizer
Diffstat (limited to 'src/construct/partial_data.rs')
-rw-r--r--  src/construct/partial_data.rs | 8
1 file changed, 4 insertions(+), 4 deletions(-)
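The substantive change is a rename of the tokenizer's shared-state field from `stop` to `markers`: each data state now checks the current byte against `tokenize_state.markers`. Below is a minimal standalone sketch of that guard pattern; the simplified `TokenizeState` and `Tokenizer` structs and the `at_marker` helper are illustrative stand-ins, not the crate's actual types.

```rust
/// Minimal sketch of the marker-guard pattern seen in the hunks below,
/// using simplified stand-ins for the crate's real types.
struct TokenizeState {
    /// Bytes that start or end a data run (`markers` after this commit,
    /// previously `stop`).
    markers: &'static [u8],
}

struct Tokenizer {
    current: Option<u8>,
    tokenize_state: TokenizeState,
}

impl Tokenizer {
    /// True when the current byte is one of the configured markers.
    fn at_marker(&self) -> bool {
        matches!(self.current, Some(byte) if self.tokenize_state.markers.contains(&byte))
    }
}

fn main() {
    let tokenizer = Tokenizer {
        current: Some(b'*'),
        tokenize_state: TokenizeState { markers: &[b'*', b'_'] },
    };
    assert!(tokenizer.at_marker());
}
```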
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index a68f359..0ad67c5 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -17,8 +17,8 @@ use crate::tokenizer::{EventType, State, StateName, Tokenizer};
 /// ```
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
-        // Make sure to eat the first `stop`.
-        Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => {
+        // Make sure to eat the first `markers`.
+        Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => {
             tokenizer.enter(Token::Data);
             tokenizer.consume();
             State::Next(StateName::DataInside)
@@ -42,7 +42,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
             tokenizer.exit(Token::LineEnding);
             State::Next(StateName::DataAtBreak)
         }
-        Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => {
+        Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => {
             tokenizer.register_resolver_before("data".to_string(), Box::new(resolve_data));
             State::Ok
         }
@@ -62,7 +62,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
 pub fn inside(tokenizer: &mut Tokenizer) -> State {
     let done = match tokenizer.current {
         None | Some(b'\n') => true,
-        Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => true,
+        Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => true,
         _ => false,
     };