path: root/src/content
Diffstat (limited to '')
-rw-r--r--  src/content/document.rs  38
-rw-r--r--  src/content/flow.rs      64
-rw-r--r--  src/content/string.rs    10
-rw-r--r--  src/content/text.rs      42
4 files changed, 77 insertions, 77 deletions
diff --git a/src/content/document.rs b/src/content/document.rs
index 2c340f2..5ce4dc4 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -105,11 +105,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.point.clone(),
tokenizer.parse_state,
)));
- tokenizer.tokenize_state.document_child_state = Some(State::Fn(StateName::FlowStart));
+ tokenizer.tokenize_state.document_child_state = Some(State::Next(StateName::FlowStart));
tokenizer.attempt(
StateName::BomStart,
- State::Fn(StateName::DocumentLineStart),
- State::Fn(StateName::DocumentLineStart),
+ State::Next(StateName::DocumentLineStart),
+ State::Next(StateName::DocumentLineStart),
)
}
@@ -144,16 +144,16 @@ pub fn container_existing_before(tokenizer: &mut Tokenizer) -> State {
.tokenize_state
.document_container_stack
.remove(tokenizer.tokenize_state.document_continued);
- let state_name = match container.kind {
+ let name = match container.kind {
Container::BlockQuote => StateName::BlockQuoteContStart,
Container::ListItem => StateName::ListContStart,
};
tokenizer.container = Some(container);
tokenizer.attempt(
- state_name,
- State::Fn(StateName::DocumentContainerExistingAfter),
- State::Fn(StateName::DocumentContainerExistingMissing),
+ name,
+ State::Next(StateName::DocumentContainerExistingAfter),
+ State::Next(StateName::DocumentContainerExistingMissing),
)
}
// Otherwise, check new containers.
@@ -239,8 +239,8 @@ pub fn container_new_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::BlockQuoteStart,
- State::Fn(StateName::DocumentContainerNewAfter),
- State::Fn(StateName::DocumentContainerNewBeforeNotBlockQuote),
+ State::Next(StateName::DocumentContainerNewAfter),
+ State::Next(StateName::DocumentContainerNewBeforeNotBlockQuote),
)
}
@@ -255,8 +255,8 @@ pub fn container_new_before_not_block_quote(tokenizer: &mut Tokenizer) -> State
tokenizer.attempt(
StateName::ListStart,
- State::Fn(StateName::DocumentContainerNewAfter),
- State::Fn(StateName::DocumentContainersAfter),
+ State::Next(StateName::DocumentContainerNewAfter),
+ State::Next(StateName::DocumentContainersAfter),
)
}
@@ -340,11 +340,11 @@ pub fn flow_inside(tokenizer: &mut Tokenizer) -> State {
Some(b'\n') => {
tokenizer.consume();
tokenizer.exit(Token::Data);
- State::Fn(StateName::DocumentFlowEnd)
+ State::Next(StateName::DocumentFlowEnd)
}
Some(_) => {
tokenizer.consume();
- State::Fn(StateName::DocumentFlowInside)
+ State::Next(StateName::DocumentFlowInside)
}
}
}
@@ -371,10 +371,10 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
.tokenize_state
.document_child_state
.take()
- .unwrap_or(State::Fn(StateName::FlowStart));
+ .unwrap_or(State::Next(StateName::FlowStart));
- let state_name = match state {
- State::Fn(state_name) => state_name,
+ let name = match state {
+ State::Next(name) => name,
_ => unreachable!("expected state name"),
};
@@ -382,10 +382,10 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
// To do: handle VS?
// if position.start.vs > 0 {
// }
- let state = child.push(position.start.index, position.end.index, state_name);
+ let state = child.push(position.start.index, position.end.index, name);
interrupt = child.interrupt;
- paragraph = matches!(state, State::Fn(StateName::ParagraphInside))
+ paragraph = matches!(state, State::Next(StateName::ParagraphInside))
|| (!child.events.is_empty()
&& child.events[skip::opt_back(
&child.events,
@@ -439,7 +439,7 @@ fn exit_containers(tokenizer: &mut Tokenizer, phase: &Phase) {
.tokenize_state
.document_child_state
.take()
- .unwrap_or(State::Fn(StateName::FlowStart));
+ .unwrap_or(State::Next(StateName::FlowStart));
child.flush(state, false);
}
diff --git a/src/content/flow.rs b/src/content/flow.rs
index b3fb866..cd913c6 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -36,41 +36,41 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'`' | b'~') => tokenizer.attempt(
StateName::CodeFencedStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeParagraph),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeParagraph),
),
Some(b'<') => tokenizer.attempt(
StateName::HtmlFlowStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeParagraph),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeParagraph),
),
Some(b'#') => tokenizer.attempt(
StateName::HeadingAtxStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeParagraph),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeParagraph),
),
// Note: `-` is also used in thematic breaks, so it’s not included here.
Some(b'=') => tokenizer.attempt(
StateName::HeadingSetextStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeParagraph),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeParagraph),
),
Some(b'*' | b'_') => tokenizer.attempt(
StateName::ThematicBreakStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeParagraph),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeParagraph),
),
Some(b'[') => tokenizer.attempt(
StateName::DefinitionStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeParagraph),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeParagraph),
),
// Actual parsing: blank line? Indented code? Indented anything?
// Also includes `-` which can be a setext heading underline or a thematic break.
None | Some(b'\t' | b'\n' | b' ' | b'-') => before_blank_line(tokenizer),
Some(_) => tokenizer.attempt(
StateName::ParagraphStart,
- State::Fn(StateName::FlowAfter),
+ State::Next(StateName::FlowAfter),
State::Nok,
),
}
@@ -79,8 +79,8 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
pub fn before_blank_line(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::BlankLineStart,
- State::Fn(StateName::FlowBlankLineAfter),
- State::Fn(StateName::FlowBeforeCodeIndented),
+ State::Next(StateName::FlowBlankLineAfter),
+ State::Next(StateName::FlowBeforeCodeIndented),
)
}
@@ -99,56 +99,56 @@ pub fn before_blank_line(tokenizer: &mut Tokenizer) -> State {
pub fn before_code_indented(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::CodeIndentedStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeCodeFenced),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeCodeFenced),
)
}
pub fn before_code_fenced(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::CodeFencedStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeHtml),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeHtml),
)
}
pub fn before_html(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::HtmlFlowStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeHeadingAtx),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeHeadingAtx),
)
}
pub fn before_heading_atx(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::HeadingAtxStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeHeadingSetext),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeHeadingSetext),
)
}
pub fn before_heading_setext(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::HeadingSetextStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeThematicBreak),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeThematicBreak),
)
}
pub fn before_thematic_break(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::ThematicBreakStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeDefinition),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeDefinition),
)
}
pub fn before_definition(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::DefinitionStart,
- State::Fn(StateName::FlowAfter),
- State::Fn(StateName::FlowBeforeParagraph),
+ State::Next(StateName::FlowAfter),
+ State::Next(StateName::FlowBeforeParagraph),
)
}
@@ -168,7 +168,7 @@ pub fn blank_line_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.exit(Token::BlankLineEnding);
// Feel free to interrupt.
tokenizer.interrupt = false;
- State::Fn(StateName::FlowStart)
+ State::Next(StateName::FlowStart)
}
_ => unreachable!("expected eol/eof"),
}
@@ -190,7 +190,7 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Fn(StateName::FlowStart)
+ State::Next(StateName::FlowStart)
}
_ => unreachable!("expected eol/eof"),
}
@@ -204,7 +204,7 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
pub fn before_paragraph(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::ParagraphStart,
- State::Fn(StateName::FlowAfter),
+ State::Next(StateName::FlowAfter),
State::Nok,
)
}
diff --git a/src/content/string.rs b/src/content/string.rs
index b3df6ec..fda9b51 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -30,13 +30,13 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
None => State::Ok,
Some(b'&') => tokenizer.attempt(
StateName::CharacterReferenceStart,
- State::Fn(StateName::StringBefore),
- State::Fn(StateName::StringBeforeData),
+ State::Next(StateName::StringBefore),
+ State::Next(StateName::StringBeforeData),
),
Some(b'\\') => tokenizer.attempt(
StateName::CharacterEscapeStart,
- State::Fn(StateName::StringBefore),
- State::Fn(StateName::StringBeforeData),
+ State::Next(StateName::StringBefore),
+ State::Next(StateName::StringBeforeData),
),
_ => before_data(tokenizer),
}
@@ -46,7 +46,7 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
pub fn before_data(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::DataStart,
- State::Fn(StateName::StringBefore),
+ State::Next(StateName::StringBefore),
State::Nok,
)
}
diff --git a/src/content/text.rs b/src/content/text.rs
index ff8c9eb..eb5a231 100644
--- a/src/content/text.rs
+++ b/src/content/text.rs
@@ -48,44 +48,44 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
None => State::Ok,
Some(b'!') => tokenizer.attempt(
StateName::LabelStartImageStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeData),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeData),
),
Some(b'&') => tokenizer.attempt(
StateName::CharacterReferenceStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeData),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeData),
),
Some(b'*' | b'_') => tokenizer.attempt(
StateName::AttentionStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeData),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeData),
),
// `autolink`, `html_text` (order does not matter)
Some(b'<') => tokenizer.attempt(
StateName::AutolinkStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeHtml),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeHtml),
),
Some(b'[') => tokenizer.attempt(
StateName::LabelStartLinkStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeData),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeData),
),
Some(b'\\') => tokenizer.attempt(
StateName::CharacterEscapeStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeHardBreakEscape),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeHardBreakEscape),
),
Some(b']') => tokenizer.attempt(
StateName::LabelEndStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeData),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeData),
),
Some(b'`') => tokenizer.attempt(
StateName::CodeTextStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeData),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeData),
),
_ => before_data(tokenizer),
}
@@ -95,8 +95,8 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
pub fn before_html(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::HtmlTextStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeData),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeData),
)
}
@@ -104,8 +104,8 @@ pub fn before_html(tokenizer: &mut Tokenizer) -> State {
pub fn before_hard_break_escape(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::HardBreakEscapeStart,
- State::Fn(StateName::TextBefore),
- State::Fn(StateName::TextBeforeData),
+ State::Next(StateName::TextBefore),
+ State::Next(StateName::TextBeforeData),
)
}
@@ -117,7 +117,7 @@ pub fn before_hard_break_escape(tokenizer: &mut Tokenizer) -> State {
pub fn before_data(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
StateName::DataStart,
- State::Fn(StateName::TextBefore),
+ State::Next(StateName::TextBefore),
State::Nok,
)
}
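
For orientation, every hunk above makes the same mechanical change: the continuation variant of the tokenizer's `State` enum is renamed, so `State::Fn(StateName::X)` becomes `State::Next(StateName::X)` at each `attempt` call site and in each match on the returned state. Below is a minimal, self-contained sketch of that pattern; the `StateName` values mirror names from the diff, but the enum shape and the stub `Tokenizer::attempt` are assumptions standing in for the real definitions elsewhere in the crate, which carry more variants and the actual tokenizing logic.

// Standalone sketch, not the crate's real code: illustrates the renamed
// `State::Next(StateName)` continuation pattern used throughout the hunks.

#[derive(Clone, Copy, Debug)]
enum StateName {
    FlowAfter,
    ParagraphStart,
}

/// What a state function returns; the continuation variant was previously
/// spelled `Fn(StateName)` and is `Next(StateName)` after this change.
#[derive(Debug)]
#[allow(dead_code)]
enum State {
    /// Continue at the state identified by this name.
    Next(StateName),
    /// The attempted construct matched.
    Ok,
    /// The attempted construct did not match.
    Nok,
}

/// Minimal stand-in for the tokenizer: the real `attempt` stores the `ok`
/// and `nok` continuations and starts the named construct; this stub just
/// pretends the construct matched and hands back the success state.
struct Tokenizer;

impl Tokenizer {
    fn attempt(&mut self, name: StateName, ok: State, nok: State) -> State {
        let _ = (name, nok);
        ok
    }
}

/// Shape of a flow state after the rename: try a construct, then continue
/// at a named state on success, or fail outright.
fn flow_before_paragraph(tokenizer: &mut Tokenizer) -> State {
    tokenizer.attempt(
        StateName::ParagraphStart,
        State::Next(StateName::FlowAfter),
        State::Nok,
    )
}

fn main() {
    let mut tokenizer = Tokenizer;
    // Prints `Next(FlowAfter)` with the stub `attempt` above.
    println!("{:?}", flow_before_paragraph(&mut tokenizer));
}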