Diffstat (limited to 'src/construct')
-rw-r--r--  src/construct/code_fenced.rs          22
-rw-r--r--  src/construct/code_text.rs             8
-rw-r--r--  src/construct/definition.rs           13
-rw-r--r--  src/construct/html_flow.rs            10
-rw-r--r--  src/construct/html_text.rs            86
-rw-r--r--  src/construct/label_end.rs            36
-rw-r--r--  src/construct/label_start_image.rs     2
-rw-r--r--  src/construct/label_start_link.rs      2
-rw-r--r--  src/construct/partial_data.rs         8
-rw-r--r--  src/construct/partial_destination.rs   2
-rw-r--r--  src/construct/partial_label.rs        17
-rw-r--r--  src/construct/partial_title.rs        17
12 files changed, 133 insertions, 90 deletions
diff --git a/src/construct/code_fenced.rs b/src/construct/code_fenced.rs
index 0d4345a..26e1148 100644
--- a/src/construct/code_fenced.rs
+++ b/src/construct/code_fenced.rs
@@ -162,7 +162,7 @@ pub fn before_sequence_open(tokenizer: &mut Tokenizer) -> State {
if let Some(b'`' | b'~') = tokenizer.current {
tokenizer.tokenize_state.marker = tokenizer.current.unwrap();
- tokenizer.tokenize_state.prefix = prefix;
+ tokenizer.tokenize_state.size_c = prefix;
tokenizer.enter(Token::CodeFencedFenceSequence);
State::Retry(StateName::CodeFencedSequenceOpen)
} else {
@@ -196,7 +196,7 @@ pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
}
_ => {
tokenizer.tokenize_state.marker = 0;
- tokenizer.tokenize_state.prefix = 0;
+ tokenizer.tokenize_state.size_c = 0;
tokenizer.tokenize_state.size = 0;
State::Nok
}
@@ -259,7 +259,7 @@ pub fn info(tokenizer: &mut Tokenizer) -> State {
Some(b'`') if tokenizer.tokenize_state.marker == b'`' => {
tokenizer.concrete = false;
tokenizer.tokenize_state.marker = 0;
- tokenizer.tokenize_state.prefix = 0;
+ tokenizer.tokenize_state.size_c = 0;
tokenizer.tokenize_state.size = 0;
State::Nok
}
@@ -307,7 +307,7 @@ pub fn meta(tokenizer: &mut Tokenizer) -> State {
Some(b'`') if tokenizer.tokenize_state.marker == b'`' => {
tokenizer.concrete = false;
tokenizer.tokenize_state.marker = 0;
- tokenizer.tokenize_state.prefix = 0;
+ tokenizer.tokenize_state.size_c = 0;
tokenizer.tokenize_state.size = 0;
State::Nok
}
@@ -410,14 +410,14 @@ pub fn before_sequence_close(tokenizer: &mut Tokenizer) -> State {
pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'`' | b'~') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker => {
- tokenizer.tokenize_state.size_other += 1;
+ tokenizer.tokenize_state.size_b += 1;
tokenizer.consume();
State::Next(StateName::CodeFencedSequenceClose)
}
- _ if tokenizer.tokenize_state.size_other >= CODE_FENCED_SEQUENCE_SIZE_MIN
- && tokenizer.tokenize_state.size_other >= tokenizer.tokenize_state.size =>
+ _ if tokenizer.tokenize_state.size_b >= CODE_FENCED_SEQUENCE_SIZE_MIN
+ && tokenizer.tokenize_state.size_b >= tokenizer.tokenize_state.size =>
{
- tokenizer.tokenize_state.size_other = 0;
+ tokenizer.tokenize_state.size_b = 0;
tokenizer.exit(Token::CodeFencedFenceSequence);
let name = space_or_tab(tokenizer);
tokenizer.attempt(
@@ -427,7 +427,7 @@ pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
)
}
_ => {
- tokenizer.tokenize_state.size_other = 0;
+ tokenizer.tokenize_state.size_b = 0;
State::Nok
}
}
@@ -474,7 +474,7 @@ pub fn content_before(tokenizer: &mut Tokenizer) -> State {
/// | ~~~
/// ```
pub fn content_start(tokenizer: &mut Tokenizer) -> State {
- let name = space_or_tab_min_max(tokenizer, 0, tokenizer.tokenize_state.prefix);
+ let name = space_or_tab_min_max(tokenizer, 0, tokenizer.tokenize_state.size_c);
tokenizer.attempt(
name,
State::Next(StateName::CodeFencedBeforeContentChunk),
@@ -536,7 +536,7 @@ pub fn content_chunk(tokenizer: &mut Tokenizer) -> State {
pub fn after(tokenizer: &mut Tokenizer) -> State {
tokenizer.exit(Token::CodeFenced);
tokenizer.tokenize_state.marker = 0;
- tokenizer.tokenize_state.prefix = 0;
+ tokenizer.tokenize_state.size_c = 0;
tokenizer.tokenize_state.size = 0;
// Feel free to interrupt.
tokenizer.interrupt = false;
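Taken together, the code_fenced.rs hunks (and the matching ones in code_text.rs, definition.rs, and html_flow.rs below) rename construct-specific scratch fields to generic, reusable slots: `prefix` becomes `size_c`, `size_other` becomes `size_b`, and `marker_other` becomes `marker_b`. A minimal sketch of the shape this implies, with types inferred from their uses in the hunks; the crate's real `TokenizeState` carries many more fields:

```rust
/// Sketch of the shared scratch state after the renames. Constructs
/// reuse the generic slots for their own purposes and zero them on the
/// way out, as every `Nok` and `after` arm in the hunks does.
#[derive(Default)]
struct TokenizeState {
    /// Primary marker byte (the fence byte: backtick or tilde).
    marker: u8,
    /// Secondary marker byte (the attribute quote in html_flow).
    marker_b: u8,
    /// Primary counter (opening fence length).
    size: usize,
    /// Secondary counter (closing fence length, or a balance limit).
    size_b: usize,
    /// Tertiary counter (the former `prefix`: indent of the fence).
    size_c: usize,
}

impl TokenizeState {
    /// Cleanup mirroring `code_fenced::after` above.
    fn reset_code_fenced(&mut self) {
        self.marker = 0;
        self.size = 0;
        self.size_c = 0;
    }
}
```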
diff --git a/src/construct/code_text.rs b/src/construct/code_text.rs
index 2c8faf3..d7ada3d 100644
--- a/src/construct/code_text.rs
+++ b/src/construct/code_text.rs
@@ -185,16 +185,16 @@ pub fn data(tokenizer: &mut Tokenizer) -> State {
pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'`') => {
- tokenizer.tokenize_state.size_other += 1;
+ tokenizer.tokenize_state.size_b += 1;
tokenizer.consume();
State::Next(StateName::CodeTextSequenceClose)
}
_ => {
- if tokenizer.tokenize_state.size == tokenizer.tokenize_state.size_other {
+ if tokenizer.tokenize_state.size == tokenizer.tokenize_state.size_b {
tokenizer.exit(Token::CodeTextSequence);
tokenizer.exit(Token::CodeText);
tokenizer.tokenize_state.size = 0;
- tokenizer.tokenize_state.size_other = 0;
+ tokenizer.tokenize_state.size_b = 0;
State::Ok
} else {
let index = tokenizer.events.len();
@@ -202,7 +202,7 @@ pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
// More or less accents: mark as data.
tokenizer.events[index - 1].token_type = Token::CodeTextData;
tokenizer.events[index].token_type = Token::CodeTextData;
- tokenizer.tokenize_state.size_other = 0;
+ tokenizer.tokenize_state.size_b = 0;
State::Retry(StateName::CodeTextBetween)
}
}
diff --git a/src/construct/definition.rs b/src/construct/definition.rs
index 62d0f3b..5db611b 100644
--- a/src/construct/definition.rs
+++ b/src/construct/definition.rs
@@ -174,7 +174,12 @@ pub fn label_after(tokenizer: &mut Tokenizer) -> State {
}
}
-/// To do.
+/// After the marker.
+///
+/// ```markdown
+/// > | [a]: b "c"
+/// ^
+/// ```
pub fn marker_after(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab_eol(tokenizer);
tokenizer.attempt(
@@ -196,7 +201,7 @@ pub fn destination_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_3 = Token::DefinitionDestinationLiteralMarker;
tokenizer.tokenize_state.token_4 = Token::DefinitionDestinationRaw;
tokenizer.tokenize_state.token_5 = Token::DefinitionDestinationString;
- tokenizer.tokenize_state.size_other = usize::MAX;
+ tokenizer.tokenize_state.size_b = usize::MAX;
tokenizer.attempt(
StateName::DestinationStart,
State::Next(StateName::DefinitionDestinationAfter),
@@ -216,7 +221,7 @@ pub fn destination_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_3 = Token::Data;
tokenizer.tokenize_state.token_4 = Token::Data;
tokenizer.tokenize_state.token_5 = Token::Data;
- tokenizer.tokenize_state.size_other = 0;
+ tokenizer.tokenize_state.size_b = 0;
tokenizer.attempt(
StateName::DefinitionTitleBefore,
State::Next(StateName::DefinitionAfter),
@@ -231,7 +236,7 @@ pub fn destination_missing(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_3 = Token::Data;
tokenizer.tokenize_state.token_4 = Token::Data;
tokenizer.tokenize_state.token_5 = Token::Data;
- tokenizer.tokenize_state.size_other = 0;
+ tokenizer.tokenize_state.size_b = 0;
State::Nok
}
diff --git a/src/construct/html_flow.rs b/src/construct/html_flow.rs
index b49b231..7a346e9 100644
--- a/src/construct/html_flow.rs
+++ b/src/construct/html_flow.rs
@@ -508,7 +508,7 @@ pub fn complete_attribute_value_before(tokenizer: &mut Tokenizer) -> State {
State::Next(StateName::HtmlFlowCompleteAttributeValueBefore)
}
Some(b'"' | b'\'') => {
- tokenizer.tokenize_state.marker_other = tokenizer.current.unwrap();
+ tokenizer.tokenize_state.marker_b = tokenizer.current.unwrap();
tokenizer.consume();
State::Next(StateName::HtmlFlowCompleteAttributeValueQuoted)
}
@@ -528,13 +528,11 @@ pub fn complete_attribute_value_quoted(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\n') => {
tokenizer.tokenize_state.marker = 0;
- tokenizer.tokenize_state.marker_other = 0;
+ tokenizer.tokenize_state.marker_b = 0;
State::Nok
}
- Some(b'"' | b'\'')
- if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker_other =>
- {
- tokenizer.tokenize_state.marker_other = 0;
+ Some(b'"' | b'\'') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker_b => {
+ tokenizer.tokenize_state.marker_b = 0;
tokenizer.consume();
State::Next(StateName::HtmlFlowCompleteAttributeValueQuotedAfter)
}
diff --git a/src/construct/html_text.rs b/src/construct/html_text.rs
index df6bd99..7474dbf 100644
--- a/src/construct/html_text.rs
+++ b/src/construct/html_text.rs
@@ -207,10 +207,11 @@ pub fn comment_start_dash(tokenizer: &mut Tokenizer) -> State {
pub fn comment(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Nok,
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state = Some(StateName::HtmlTextComment);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextComment),
+ State::Nok,
+ ),
Some(b'-') => {
tokenizer.consume();
State::Next(StateName::HtmlTextCommentClose)
@@ -269,10 +270,11 @@ pub fn cdata_open_inside(tokenizer: &mut Tokenizer) -> State {
pub fn cdata(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Nok,
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state = Some(StateName::HtmlTextCdata);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextCdata),
+ State::Nok,
+ ),
Some(b']') => {
tokenizer.consume();
State::Next(StateName::HtmlTextCdataClose)
@@ -323,10 +325,11 @@ pub fn cdata_end(tokenizer: &mut Tokenizer) -> State {
pub fn declaration(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'>') => State::Retry(StateName::HtmlTextEnd),
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state = Some(StateName::HtmlTextDeclaration);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextDeclaration),
+ State::Nok,
+ ),
_ => {
tokenizer.consume();
State::Next(StateName::HtmlTextDeclaration)
@@ -343,10 +346,11 @@ pub fn declaration(tokenizer: &mut Tokenizer) -> State {
pub fn instruction(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Nok,
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state = Some(StateName::HtmlTextInstruction);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextInstruction),
+ State::Nok,
+ ),
Some(b'?') => {
tokenizer.consume();
State::Next(StateName::HtmlTextInstructionClose)
@@ -413,10 +417,11 @@ pub fn tag_close(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn tag_close_between(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state = Some(StateName::HtmlTextTagCloseBetween);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextTagCloseBetween),
+ State::Nok,
+ ),
Some(b'\t' | b' ') => {
tokenizer.consume();
State::Next(StateName::HtmlTextTagCloseBetween)
@@ -451,10 +456,11 @@ pub fn tag_open(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn tag_open_between(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state = Some(StateName::HtmlTextTagOpenBetween);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextTagOpenBetween),
+ State::Nok,
+ ),
Some(b'\t' | b' ') => {
tokenizer.consume();
State::Next(StateName::HtmlTextTagOpenBetween)
@@ -498,11 +504,11 @@ pub fn tag_open_attribute_name(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn tag_open_attribute_name_after(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state =
- Some(StateName::HtmlTextTagOpenAttributeNameAfter);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextTagOpenAttributeNameAfter),
+ State::Nok,
+ ),
Some(b'\t' | b' ') => {
tokenizer.consume();
State::Next(StateName::HtmlTextTagOpenAttributeNameAfter)
@@ -525,11 +531,11 @@ pub fn tag_open_attribute_name_after(tokenizer: &mut Tokenizer) -> State {
pub fn tag_open_attribute_value_before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'<' | b'=' | b'>' | b'`') => State::Nok,
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state =
- Some(StateName::HtmlTextTagOpenAttributeValueBefore);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextTagOpenAttributeValueBefore),
+ State::Nok,
+ ),
Some(b'\t' | b' ') => {
tokenizer.consume();
State::Next(StateName::HtmlTextTagOpenAttributeValueBefore)
@@ -558,11 +564,11 @@ pub fn tag_open_attribute_value_quoted(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.marker = 0;
State::Nok
}
- Some(b'\n') => {
- tokenizer.tokenize_state.return_state =
- Some(StateName::HtmlTextTagOpenAttributeValueQuoted);
- State::Retry(StateName::HtmlTextLineEndingBefore)
- }
+ Some(b'\n') => tokenizer.attempt(
+ StateName::HtmlTextLineEndingBefore,
+ State::Next(StateName::HtmlTextTagOpenAttributeValueQuoted),
+ State::Nok,
+ ),
Some(b'"' | b'\'') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker => {
tokenizer.tokenize_state.marker = 0;
tokenizer.consume();
@@ -678,5 +684,5 @@ pub fn line_ending_after(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn line_ending_after_prefix(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::HtmlTextData);
- State::Retry(tokenizer.tokenize_state.return_state.take().unwrap())
+ State::Ok
}
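The html_text.rs hunks all make the same change: instead of stashing a continuation in `tokenize_state.return_state` and retrying the shared line-ending states (which ended by retrying the stashed name), each `\n` arm now runs those states as a nested `attempt` with explicit ok/nok continuations, which is why `line_ending_after_prefix` can simply return `State::Ok`. A toy model of the two styles; the real `Tokenizer`, `State`, and `StateName` are richer than this:

```rust
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum Name {
    HtmlTextComment,
    HtmlTextLineEndingBefore,
}

#[allow(dead_code)]
enum State {
    Next(Name),
    Retry(Name),
    Ok,
    Nok,
}

struct Toy {
    return_state: Option<Name>,
}

impl Toy {
    // Old style: stash where to come back to, jump to the shared EOL
    // states, and rely on them to retry the stashed name at the end.
    fn on_eol_old(&mut self) -> State {
        self.return_state = Some(Name::HtmlTextComment);
        State::Retry(Name::HtmlTextLineEndingBefore)
    }

    // New style: run the EOL states as a nested attempt with explicit
    // continuations; no mutable return slot survives the call.
    fn attempt(&mut self, name: Name, ok: State, _nok: State) -> State {
        // A real driver executes `name` to completion and picks `ok`
        // or `_nok`; this stub always succeeds.
        let _ = name;
        ok
    }

    fn on_eol_new(&mut self) -> State {
        self.attempt(
            Name::HtmlTextLineEndingBefore,
            State::Next(Name::HtmlTextComment),
            State::Nok,
        )
    }
}
```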
diff --git a/src/construct/label_end.rs b/src/construct/label_end.rs
index 3337cec..a25f917 100644
--- a/src/construct/label_end.rs
+++ b/src/construct/label_end.rs
@@ -170,12 +170,12 @@ use crate::util::{
pub fn start(tokenizer: &mut Tokenizer) -> State {
if Some(b']') == tokenizer.current && tokenizer.parse_state.constructs.label_end {
let mut label_start_index = None;
- let mut index = tokenizer.label_start_stack.len();
+ let mut index = tokenizer.tokenize_state.label_start_stack.len();
while index > 0 {
index -= 1;
- if !tokenizer.label_start_stack[index].balanced {
+ if !tokenizer.tokenize_state.label_start_stack[index].balanced {
label_start_index = Some(index);
break;
}
@@ -184,6 +184,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
// If there is an okay opening:
if let Some(label_start_index) = label_start_index {
let label_start = tokenizer
+ .tokenize_state
.label_start_stack
.get_mut(label_start_index)
.unwrap();
@@ -221,7 +222,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
pub fn after(tokenizer: &mut Tokenizer) -> State {
- let start = &tokenizer.label_start_stack[tokenizer.tokenize_state.start];
+ let start = &tokenizer.tokenize_state.label_start_stack[tokenizer.tokenize_state.start];
let defined = tokenizer
.parse_state
.definitions
@@ -298,17 +299,23 @@ pub fn reference_not_full(tokenizer: &mut Tokenizer) -> State {
pub fn ok(tokenizer: &mut Tokenizer) -> State {
let label_start_index = tokenizer.tokenize_state.start;
// Remove this one and everything after it.
- let mut left = tokenizer.label_start_stack.split_off(label_start_index);
+ let mut left = tokenizer
+ .tokenize_state
+ .label_start_stack
+ .split_off(label_start_index);
// Remove this one from `left`, as we’ll move it to `media_list`.
let label_start = left.remove(0);
- tokenizer.label_start_list_loose.append(&mut left);
+ tokenizer
+ .tokenize_state
+ .label_start_list_loose
+ .append(&mut left);
let is_link = tokenizer.events[label_start.start.0].token_type == Token::LabelLink;
if is_link {
let mut index = 0;
- while index < tokenizer.label_start_stack.len() {
- let label_start = &mut tokenizer.label_start_stack[index];
+ while index < tokenizer.tokenize_state.label_start_stack.len() {
+ let label_start = &mut tokenizer.tokenize_state.label_start_stack[index];
if tokenizer.events[label_start.start.0].token_type == Token::LabelLink {
label_start.inactive = true;
}
@@ -316,7 +323,7 @@ pub fn ok(tokenizer: &mut Tokenizer) -> State {
}
}
- tokenizer.media_list.push(Media {
+ tokenizer.tokenize_state.media_list.push(Media {
start: label_start.start,
end: (tokenizer.tokenize_state.end, tokenizer.events.len() - 1),
});
@@ -340,6 +347,7 @@ pub fn ok(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn nok(tokenizer: &mut Tokenizer) -> State {
tokenizer
+ .tokenize_state
.label_start_stack
.get_mut(tokenizer.tokenize_state.start)
.unwrap()
@@ -398,7 +406,7 @@ pub fn resource_open(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_3 = Token::ResourceDestinationLiteralMarker;
tokenizer.tokenize_state.token_4 = Token::ResourceDestinationRaw;
tokenizer.tokenize_state.token_5 = Token::ResourceDestinationString;
- tokenizer.tokenize_state.size_other = RESOURCE_DESTINATION_BALANCE_MAX;
+ tokenizer.tokenize_state.size_b = RESOURCE_DESTINATION_BALANCE_MAX;
tokenizer.attempt(
StateName::DestinationStart,
@@ -420,7 +428,7 @@ pub fn resource_destination_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_3 = Token::Data;
tokenizer.tokenize_state.token_4 = Token::Data;
tokenizer.tokenize_state.token_5 = Token::Data;
- tokenizer.tokenize_state.size_other = 0;
+ tokenizer.tokenize_state.size_b = 0;
let name = space_or_tab_eol(tokenizer);
tokenizer.attempt(
name,
@@ -436,7 +444,7 @@ pub fn resource_destination_missing(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_3 = Token::Data;
tokenizer.tokenize_state.token_4 = Token::Data;
tokenizer.tokenize_state.token_5 = Token::Data;
- tokenizer.tokenize_state.size_other = 0;
+ tokenizer.tokenize_state.size_b = 0;
State::Nok
}
@@ -605,9 +613,9 @@ pub fn reference_collapsed_open(tokenizer: &mut Tokenizer) -> State {
/// images, or turns them back into data.
#[allow(clippy::too_many_lines)]
pub fn resolve_media(tokenizer: &mut Tokenizer) {
- let mut left = tokenizer.label_start_list_loose.split_off(0);
- let mut left_2 = tokenizer.label_start_stack.split_off(0);
- let media = tokenizer.media_list.split_off(0);
+ let mut left = tokenizer.tokenize_state.label_start_list_loose.split_off(0);
+ let mut left_2 = tokenizer.tokenize_state.label_start_stack.split_off(0);
+ let media = tokenizer.tokenize_state.media_list.split_off(0);
left.append(&mut left_2);
let events = &tokenizer.events;
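label_end.rs and the two label_start files below stop reading the label stacks directly off the `Tokenizer` and go through `tokenize_state` instead, consolidating link/image bookkeeping with the rest of the per-construct scratch data. A rough sketch of the shapes involved; the fields shown are the ones these hunks actually touch, anything beyond that is assumption:

```rust
/// A `[` or `![` opening that may still be matched by a `]`.
struct LabelStart {
    /// (enter, exit) indices of the marker's events.
    start: (usize, usize),
    balanced: bool,
    inactive: bool,
}

/// A matched label: opening and closing event ranges.
struct Media {
    start: (usize, usize),
    end: (usize, usize),
}

#[derive(Default)]
struct TokenizeState {
    label_start_stack: Vec<LabelStart>,
    label_start_list_loose: Vec<LabelStart>,
    media_list: Vec<Media>,
    // ...plus the markers and counters shown earlier in this diff.
}
```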
diff --git a/src/construct/label_start_image.rs b/src/construct/label_start_image.rs
index 1730fc3..629e836 100644
--- a/src/construct/label_start_image.rs
+++ b/src/construct/label_start_image.rs
@@ -64,7 +64,7 @@ pub fn open(tokenizer: &mut Tokenizer) -> State {
tokenizer.consume();
tokenizer.exit(Token::LabelMarker);
tokenizer.exit(Token::LabelImage);
- tokenizer.label_start_stack.push(LabelStart {
+ tokenizer.tokenize_state.label_start_stack.push(LabelStart {
start: (tokenizer.events.len() - 6, tokenizer.events.len() - 1),
balanced: false,
inactive: false,
diff --git a/src/construct/label_start_link.rs b/src/construct/label_start_link.rs
index c47941c..6eb7b40 100644
--- a/src/construct/label_start_link.rs
+++ b/src/construct/label_start_link.rs
@@ -46,7 +46,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.consume();
tokenizer.exit(Token::LabelMarker);
tokenizer.exit(Token::LabelLink);
- tokenizer.label_start_stack.push(LabelStart {
+ tokenizer.tokenize_state.label_start_stack.push(LabelStart {
start: (start, tokenizer.events.len() - 1),
balanced: false,
inactive: false,
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index a68f359..0ad67c5 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -17,8 +17,8 @@ use crate::tokenizer::{EventType, State, StateName, Tokenizer};
/// ```
pub fn start(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- // Make sure to eat the first `stop`.
- Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => {
+ // Make sure to eat the first `markers`.
+ Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => {
tokenizer.enter(Token::Data);
tokenizer.consume();
State::Next(StateName::DataInside)
@@ -42,7 +42,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
tokenizer.exit(Token::LineEnding);
State::Next(StateName::DataAtBreak)
}
- Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => {
+ Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => {
tokenizer.register_resolver_before("data".to_string(), Box::new(resolve_data));
State::Ok
}
@@ -62,7 +62,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
pub fn inside(tokenizer: &mut Tokenizer) -> State {
let done = match tokenizer.current {
None | Some(b'\n') => true,
- Some(byte) if tokenizer.tokenize_state.stop.contains(&byte) => true,
+ Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => true,
_ => false,
};
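partial_data.rs renames the `stop` byte list to `markers` and updates the stale comment on the first arm to match. The field drives the same guard in all three states: a data run ends at EOF, at a line ending, or at any byte a calling construct registered. A small standalone sketch of that predicate (the marker bytes here are made up for the example):

```rust
/// Would `current` end a data run, given the registered marker bytes?
fn is_marker(markers: &[u8], current: Option<u8>) -> bool {
    match current {
        // Data always stops at EOF and at line endings...
        None | Some(b'\n') => true,
        // ...and at any byte another construct cares about.
        Some(byte) => markers.contains(&byte),
    }
}

fn main() {
    let markers = [b'`', b'[', b']'];
    assert!(is_marker(&markers, Some(b'`')));
    assert!(is_marker(&markers, None));
    assert!(!is_marker(&markers, Some(b'a')));
}
```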
diff --git a/src/construct/partial_destination.rs b/src/construct/partial_destination.rs
index 26fadc4..735fb38 100644
--- a/src/construct/partial_destination.rs
+++ b/src/construct/partial_destination.rs
@@ -182,7 +182,7 @@ pub fn raw(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size = 0;
State::Ok
}
- Some(b'(') if tokenizer.tokenize_state.size < tokenizer.tokenize_state.size_other => {
+ Some(b'(') if tokenizer.tokenize_state.size < tokenizer.tokenize_state.size_b => {
tokenizer.consume();
tokenizer.tokenize_state.size += 1;
State::Next(StateName::DestinationRaw)
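In partial_destination.rs, `size_b` (formerly `size_other`) acts as the ceiling on unescaped-paren nesting in raw destinations: definition.rs sets it to `usize::MAX` (effectively unlimited), while label_end.rs caps it at `RESOURCE_DESTINATION_BALANCE_MAX`. A simplified sketch of the counting rule; in the real `raw` state, a `)` at depth zero ends the destination rather than failing:

```rust
/// Check that `(` nesting in a raw destination never exceeds `limit`,
/// mirroring the `size < size_b` guard above.
fn paren_depth_ok(bytes: &[u8], limit: usize) -> bool {
    let mut size = 0usize;
    for &byte in bytes {
        match byte {
            b'(' => {
                if size >= limit {
                    return false; // the guard fails: `Nok`
                }
                size += 1;
            }
            b')' if size > 0 => size -= 1,
            _ => {}
        }
    }
    true
}
```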
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index a151841..6447961 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -142,13 +142,26 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
}
}
-/// To do.
+/// In a label, after whitespace.
+///
+/// ```markdown
+/// | [a␊
+/// > | b]
+/// ^
+/// ```
pub fn eol_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.connect = true;
State::Retry(StateName::LabelAtBreak)
}
-/// To do.
+/// In a label, at a blank line.
+///
+/// ```markdown
+/// | [a␊
+/// > | ␊
+/// ^
+/// | b]
+/// ```
pub fn at_blank_line(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.marker = 0;
tokenizer.tokenize_state.connect = false;
diff --git a/src/construct/partial_title.rs b/src/construct/partial_title.rs
index 0b81418..209240e 100644
--- a/src/construct/partial_title.rs
+++ b/src/construct/partial_title.rs
@@ -133,13 +133,26 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
}
}
-/// To do.
+/// In a title, after whitespace.
+///
+/// ```markdown
+/// | "a␊
+/// > | b"
+/// ^
+/// ```
pub fn after_eol(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.connect = true;
State::Retry(StateName::TitleAtBreak)
}
-/// To do.
+/// In a title, at a blank line.
+///
+/// ```markdown
+/// | "a␊
+/// > | ␊
+/// ^
+/// | b"
+/// ```
pub fn at_blank_line(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.marker = 0;
tokenizer.tokenize_state.connect = false;
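The freshly documented `eol_after`/`at_blank_line` pair behaves identically in partial_label.rs and partial_title.rs: a plain line ending sets `connect` so the next data chunk is linked to the previous string content, while a blank line resets the scratch fields before (presumably, beyond what this hunk shows) failing the construct, since labels and titles cannot span blank lines. A compressed sketch reduced to the two fields the hunks touch:

```rust
#[derive(Default)]
struct Scratch {
    marker: u8,
    connect: bool,
}

/// After a single line ending inside `[...]` or `"..."`: keep going,
/// and connect the next chunk to the previous one.
fn eol_after(s: &mut Scratch) {
    s.connect = true;
}

/// At a blank line: reset before the construct is abandoned.
fn at_blank_line(s: &mut Scratch) {
    s.marker = 0;
    s.connect = false;
}
```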