author	Titus Wormer <tituswormer@gmail.com>	2022-08-11 13:45:24 +0200
committer	Titus Wormer <tituswormer@gmail.com>	2022-08-11 13:45:24 +0200
commit	6eb2f644057f371841fe25330a57ee185f91c7af (patch)
tree	7b4d02586339d1a7f82104b4473d9ac243b3abf9 /src/construct/partial_label.rs
parent	2d35cbfceace81a217cd0fbdae7a8777c7a6465e (diff)
Refactor to move some code to `state.rs`
Diffstat (limited to 'src/construct/partial_label.rs')
-rw-r--r--	src/construct/partial_label.rs	25
1 file changed, 13 insertions, 12 deletions
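The patch below swaps `StateName` (previously exported from `tokenizer.rs`) for `Name` from the new `state` module. As a minimal sketch of the shapes involved, inferred only from the imports and call sites in this patch (the real `src/state.rs` holds many more variants, and `State::Ok` is assumed rather than shown here):

```rust
/// Names of state functions; e.g. `Name::LabelAtBreak` refers to the
/// `at_break` state of the label construct.
pub enum Name {
    LabelAtBreak,
    LabelAtBlankLine,
    LabelEolAfter,
    LabelInside,
    LabelEscape,
    // …states of the other constructs.
}

/// Result of a state function.
pub enum State {
    /// Move to this state for the next byte.
    Next(Name),
    /// Re-enter this state at the same byte (nothing consumed).
    Retry(Name),
    /// Done, successful (assumed; not visible in this patch).
    Ok,
    /// Done, not a match.
    Nok,
}
```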
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index 6447961..6e7c843 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -60,9 +60,10 @@
 use super::partial_space_or_tab::{space_or_tab_eol_with_options, EolOptions};
 use crate::constant::LINK_REFERENCE_SIZE_MAX;
+use crate::state::{Name, State};
 use crate::subtokenize::link;
 use crate::token::Token;
-use crate::tokenizer::{ContentType, State, StateName, Tokenizer};
+use crate::tokenizer::{ContentType, Tokenizer};
 
 /// Before a label.
 ///
@@ -78,7 +79,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
             tokenizer.consume();
             tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
             tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
-            State::Next(StateName::LabelAtBreak)
+            State::Next(Name::LabelAtBreak)
         }
         _ => State::Nok,
     }
@@ -111,8 +112,8 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
                 );
                 tokenizer.attempt(
                     name,
-                    State::Next(StateName::LabelEolAfter),
-                    State::Next(StateName::LabelAtBlankLine),
+                    State::Next(Name::LabelEolAfter),
+                    State::Next(Name::LabelAtBlankLine),
                 )
             }
             Some(b']') => {
@@ -136,7 +137,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
                     tokenizer.tokenize_state.connect = true;
                 }
 
-                State::Retry(StateName::LabelInside)
+                State::Retry(Name::LabelInside)
             }
         }
     }
@@ -151,7 +152,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
 /// ```
 pub fn eol_after(tokenizer: &mut Tokenizer) -> State {
     tokenizer.tokenize_state.connect = true;
-    State::Retry(StateName::LabelAtBreak)
+    State::Retry(Name::LabelAtBreak)
 }
 
 /// In a label, at a blank line.
@@ -178,12 +179,12 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None | Some(b'\n' | b'[' | b']') => {
             tokenizer.exit(Token::Data);
-            State::Retry(StateName::LabelAtBreak)
+            State::Retry(Name::LabelAtBreak)
         }
         Some(byte) => {
             if tokenizer.tokenize_state.size > LINK_REFERENCE_SIZE_MAX {
                 tokenizer.exit(Token::Data);
-                State::Retry(StateName::LabelAtBreak)
+                State::Retry(Name::LabelAtBreak)
             } else {
                 tokenizer.consume();
                 tokenizer.tokenize_state.size += 1;
@@ -191,9 +192,9 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
                     tokenizer.tokenize_state.seen = true;
                 }
                 State::Next(if matches!(byte, b'\\') {
-                    StateName::LabelEscape
+                    Name::LabelEscape
                 } else {
-                    StateName::LabelInside
+                    Name::LabelInside
                 })
             }
         }
@@ -211,8 +212,8 @@ pub fn escape(tokenizer: &mut Tokenizer) -> State {
         Some(b'[' | b'\\' | b']') => {
             tokenizer.consume();
             tokenizer.tokenize_state.size += 1;
-            State::Next(StateName::LabelInside)
+            State::Next(Name::LabelInside)
         }
-        _ => State::Retry(StateName::LabelInside),
+        _ => State::Retry(Name::LabelInside),
     }
 }
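For context on how these return values are consumed: the tokenizer feeds each `State::Next`/`State::Retry` name back into a dispatcher, which this commit moves into `state.rs`. The following self-contained driver is only a hypothetical illustration in the same spirit; the `Name` variants, `call`, and `main` below are stand-ins inferred from this patch, not markdown-rs's actual API.

```rust
// Hypothetical, minimal state-machine driver; not the crate's real `state.rs`.

#[derive(Clone, Copy, Debug)]
enum Name {
    LabelStart,
    LabelAtBreak,
    LabelInside,
}

enum State {
    Next(Name),
    Retry(Name),
    Ok,
    Nok,
}

/// Map a state name to its handler (stubbed: real handlers inspect the input).
fn call(name: Name) -> State {
    match name {
        Name::LabelStart => State::Next(Name::LabelAtBreak),
        Name::LabelAtBreak => State::Retry(Name::LabelInside),
        Name::LabelInside => State::Ok,
    }
}

fn main() {
    // Drive the machine: `Next` moves on after a byte is consumed,
    // `Retry` re-enters a state without consuming.
    let mut state = State::Next(Name::LabelStart);
    loop {
        match state {
            State::Next(name) | State::Retry(name) => state = call(name),
            State::Ok => break println!("matched"),
            State::Nok => break println!("no match"),
        }
    }
}
```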