author     Titus Wormer <tituswormer@gmail.com>  2022-10-13 13:16:04 +0200
committer  Titus Wormer <tituswormer@gmail.com>  2022-10-13 13:16:04 +0200
commit     02ac7c7a69fc7949912a266f3cfc40f7c219278e (patch)
tree       bf54fe3bf09362cad585802f1637c30b3566fc52 /src/construct
parent     2bde864146fdee411b8acef0f088ca9544aec8d0 (diff)
Remove unneeded states in `partial_label`
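The `start` state no longer branches into a separate `LabelAtMarker` state: the opening `[` is asserted rather than matched, and control goes straight to `LabelAtBreak`. As a rough sketch of the result, reassembled from the added lines in the diff below (`Tokenizer`, `State`, and `StateName` are markdown-rs internals, and the caller is assumed to guarantee the `[`):

    pub fn start(tokenizer: &mut Tokenizer) -> State {
        // The caller guarantees we are at `[`, so assert instead of branching.
        debug_assert_eq!(tokenizer.current, Some(b'['), "expected `[`");
        tokenizer.enter(tokenizer.tokenize_state.token_1.clone());
        tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
        tokenizer.consume();
        tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
        tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
        State::Next(StateName::LabelAtBreak)
    }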
Diffstat (limited to 'src/construct')
-rw-r--r--  src/construct/partial_label.rs  59
1 file changed, 13 insertions(+), 46 deletions(-)
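Failure handling is consolidated as well (a sketch inferred from the hunks below, not a verbatim copy of the new file): the state-reset-and-fail code that was previously duplicated in `at_break` and `at_blank_line` now lives in a single `nok` state that the other states retry into:

    pub fn nok(tokenizer: &mut Tokenizer) -> State {
        // Reset shared tokenize state before rejecting the label.
        tokenizer.tokenize_state.connect = false;
        tokenizer.tokenize_state.seen = false;
        tokenizer.tokenize_state.size = 0;
        State::Nok
    }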
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index f56a591..ffea0da 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -75,41 +75,13 @@ use crate::util::constant::LINK_REFERENCE_SIZE_MAX;
/// ^
/// ```
pub fn start(tokenizer: &mut Tokenizer) -> State {
- match tokenizer.current {
- Some(b'[') => {
- tokenizer.enter(tokenizer.tokenize_state.token_1.clone());
- tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
- tokenizer.consume();
- tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
- State::Next(StateName::LabelAtMarker)
- }
- _ => State::Nok,
- }
-}
-
-/// At an optional extra marker.
-///
-/// Used for footnotes.
-///
-/// ```markdown
-/// > | [^a]
-/// ^
-/// ```
-pub fn at_marker(tokenizer: &mut Tokenizer) -> State {
- // For footnotes (and potentially other custom things in the future),
- // We need to make sure there is a certain marker after `[`.
- if tokenizer.tokenize_state.marker == 0 {
- tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
- State::Retry(StateName::LabelAtBreak)
- } else if tokenizer.current == Some(tokenizer.tokenize_state.marker) {
- tokenizer.enter(tokenizer.tokenize_state.token_4.clone());
- tokenizer.consume();
- tokenizer.exit(tokenizer.tokenize_state.token_4.clone());
- tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
- State::Next(StateName::LabelAtBreak)
- } else {
- State::Nok
- }
+ debug_assert_eq!(tokenizer.current, Some(b'['), "expected `[`");
+ tokenizer.enter(tokenizer.tokenize_state.token_1.clone());
+ tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
+ tokenizer.consume();
+ tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
+ tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
+ State::Next(StateName::LabelAtBreak)
}
/// In label, at something, before something else.
@@ -123,16 +95,13 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
|| matches!(tokenizer.current, None | Some(b'['))
|| (matches!(tokenizer.current, Some(b']')) && !tokenizer.tokenize_state.seen)
{
- tokenizer.tokenize_state.connect = false;
- tokenizer.tokenize_state.seen = false;
- tokenizer.tokenize_state.size = 0;
- State::Nok
+ State::Retry(StateName::LabelNok)
} else {
match tokenizer.current {
Some(b'\n') => {
tokenizer.attempt(
State::Next(StateName::LabelEolAfter),
- State::Next(StateName::LabelAtBlankLine),
+ State::Next(StateName::LabelNok),
);
State::Retry(space_or_tab_eol_with_options(
tokenizer,
@@ -188,15 +157,13 @@ pub fn eol_after(tokenizer: &mut Tokenizer) -> State {
State::Retry(StateName::LabelAtBreak)
}
-/// In label, at blank line.
+/// In label, on something disallowed.
///
/// ```markdown
-/// | [a␊
-/// > | ␊
-/// ^
-/// | b]
+/// > | []
+/// ^
/// ```
-pub fn at_blank_line(tokenizer: &mut Tokenizer) -> State {
+pub fn nok(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.connect = false;
tokenizer.tokenize_state.seen = false;
tokenizer.tokenize_state.size = 0;