author    Titus Wormer <tituswormer@gmail.com> 2022-08-10 10:54:43 +0200
committer Titus Wormer <tituswormer@gmail.com> 2022-08-10 10:54:43 +0200
commit    3a90a49518bbc53876d3f46d8763b2fe0f03d789 (patch)
tree      eecaaf9586cf8632b3b6fe22794dae1f492849f5 /src/content
parent    8162222295d71ea7fd9270c7b3b9497b91db3f1f (diff)
Add `State::Retry`
Diffstat (limited to 'src/content')
-rw-r--r--  src/content/document.rs  20
-rw-r--r--  src/content/flow.rs        4
-rw-r--r--  src/content/string.rs      4
-rw-r--r--  src/content/text.rs        4
4 files changed, 16 insertions(+), 16 deletions(-)
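
The change replaces direct tail-calls between state functions (e.g. `container_existing_before(tokenizer)`) with a returned `State::Retry(StateName::...)` instruction: instead of one state function calling the next, it names its successor as data, and a driver loop re-runs that state on the same byte. Below is a minimal sketch of that dispatch pattern, assuming a toy tokenizer; the names (`LineStart`, `ContainersAfter`) and fields are illustrative, not the exact markdown-rs internals.

// A minimal sketch of the pattern this commit introduces; illustrative
// names, not the real markdown-rs internals.

/// Names a state function, so a transition can be stored as plain data.
#[derive(Clone, Copy)]
enum StateName {
    LineStart,
    ContainersAfter,
}

/// The instruction a state function returns to the driver loop.
enum State {
    /// Consume the current byte, then run the named state on the next one.
    Next(StateName),
    /// Re-run the named state on the *same* byte, consuming nothing.
    /// This replaces a direct call such as `containers_after(tokenizer)`.
    Retry(StateName),
    /// Finished successfully.
    Ok,
}

/// A toy tokenizer: an input buffer and a cursor.
struct Tokenizer {
    bytes: Vec<u8>,
    index: usize,
}

impl Tokenizer {
    fn current(&self) -> Option<u8> {
        self.bytes.get(self.index).copied()
    }
}

/// Start of a line: hand off to `ContainersAfter` without consuming
/// anything; exactly the spot where `State::Retry` replaces a tail-call.
fn line_start(_tokenizer: &mut Tokenizer) -> State {
    State::Retry(StateName::ContainersAfter)
}

/// After containers: consume bytes until the input runs out.
fn containers_after(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current() {
        Some(_) => State::Next(StateName::ContainersAfter),
        None => State::Ok,
    }
}

/// Central dispatch: the one place that maps names to functions.
fn call(tokenizer: &mut Tokenizer, name: StateName) -> State {
    match name {
        StateName::LineStart => line_start(tokenizer),
        StateName::ContainersAfter => containers_after(tokenizer),
    }
}

fn main() {
    let mut tokenizer = Tokenizer {
        bytes: b"abc".to_vec(),
        index: 0,
    };
    let mut state = State::Retry(StateName::LineStart);
    // The driver owns all control flow: `Retry` re-dispatches immediately,
    // `Next` advances the cursor first, `Ok` ends the run.
    loop {
        match state {
            State::Retry(name) => state = call(&mut tokenizer, name),
            State::Next(name) => {
                tokenizer.index += 1;
                state = call(&mut tokenizer, name);
            }
            State::Ok => break,
        }
    }
    assert_eq!(tokenizer.index, 3);
}

Because every transition now flows through the driver, the call stack stays flat no matter how many states hand off to one another, and the loop can tell "advance and continue" (`Next`) apart from "reconsider the same byte" (`Retry`).
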
diff --git a/src/content/document.rs b/src/content/document.rs
index 5ce4dc4..d47a31a 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -125,7 +125,7 @@ pub fn line_start(tokenizer: &mut Tokenizer) -> State {
     tokenizer.tokenize_state.document_continued = 0;
     // Containers would only be interrupting if we’ve continued.
     tokenizer.interrupt = false;
-    container_existing_before(tokenizer)
+    State::Retry(StateName::DocumentContainerExistingBefore)
 }
 
 /// Before existing containers.
@@ -158,7 +158,7 @@ pub fn container_existing_before(tokenizer: &mut Tokenizer) -> State {
     }
     // Otherwise, check new containers.
     else {
-        container_new_before(tokenizer)
+        State::Retry(StateName::DocumentContainerNewBefore)
     }
 }
 
@@ -175,7 +175,7 @@ pub fn container_existing_missing(tokenizer: &mut Tokenizer) -> State {
         .tokenize_state
         .document_container_stack
         .insert(tokenizer.tokenize_state.document_continued, container);
-    container_new_before(tokenizer)
+    State::Retry(StateName::DocumentContainerNewBefore)
 }
 
 /// After an existing container.
@@ -192,7 +192,7 @@ pub fn container_existing_after(tokenizer: &mut Tokenizer) -> State {
         .document_container_stack
         .insert(tokenizer.tokenize_state.document_continued, container);
     tokenizer.tokenize_state.document_continued += 1;
-    container_existing_before(tokenizer)
+    State::Retry(StateName::DocumentContainerExistingBefore)
 }
 
 /// Before a new container.
@@ -225,7 +225,7 @@ pub fn container_new_before(tokenizer: &mut Tokenizer) -> State {
             .unwrap()
             .concrete
         {
-            return containers_after(tokenizer);
+            return State::Retry(StateName::DocumentContainersAfter);
         }
     }
@@ -287,7 +287,7 @@ pub fn container_new_after(tokenizer: &mut Tokenizer) -> State {
     tokenizer.tokenize_state.document_continued += 1;
     tokenizer.tokenize_state.document_interrupt_before = false;
     tokenizer.interrupt = false;
-    container_new_before(tokenizer)
+    State::Retry(StateName::DocumentContainerNewBefore)
 }
 
 /// After containers, before flow.
@@ -308,7 +308,7 @@ pub fn containers_after(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         // Note: EOL is part of data.
-        None => flow_end(tokenizer),
+        None => State::Retry(StateName::DocumentFlowEnd),
         Some(_) => {
             let current = tokenizer.events.len();
             let previous = tokenizer.tokenize_state.document_data_index.take();
@@ -324,7 +324,7 @@ pub fn containers_after(tokenizer: &mut Tokenizer) -> State {
                     content_type: ContentType::Flow,
                 }),
             );
-            flow_inside(tokenizer)
+            State::Retry(StateName::DocumentFlowInside)
         }
     }
 }
@@ -334,7 +334,7 @@ pub fn flow_inside(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None => {
             tokenizer.exit(Token::Data);
-            flow_end(tokenizer)
+            State::Retry(StateName::DocumentFlowEnd)
         }
         // Note: EOL is part of data.
         Some(b'\n') => {
@@ -420,7 +420,7 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
         Some(_) => {
             tokenizer.tokenize_state.document_paragraph_before = paragraph;
             tokenizer.tokenize_state.document_interrupt_before = interrupt;
-            line_start(tokenizer)
+            State::Retry(StateName::DocumentLineStart)
         }
     }
 }
diff --git a/src/content/flow.rs b/src/content/flow.rs
index cd913c6..886b5f0 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -67,7 +67,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
         ),
         // Actual parsing: blank line? Indented code? Indented anything?
         // Also includes `-` which can be a setext heading underline or a thematic break.
-        None | Some(b'\t' | b'\n' | b' ' | b'-') => before_blank_line(tokenizer),
+        None | Some(b'\t' | b'\n' | b' ' | b'-') => State::Retry(StateName::FlowBlankLineBefore),
         Some(_) => tokenizer.attempt(
             StateName::ParagraphStart,
             State::Next(StateName::FlowAfter),
@@ -76,7 +76,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
     }
 }
 
-pub fn before_blank_line(tokenizer: &mut Tokenizer) -> State {
+pub fn blank_line_before(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         StateName::BlankLineStart,
         State::Next(StateName::FlowBlankLineAfter),
diff --git a/src/content/string.rs b/src/content/string.rs
index fda9b51..75cd56a 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -21,7 +21,7 @@ const MARKERS: [u8; 2] = [b'&', b'\\'];
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     tokenizer.register_resolver("whitespace".to_string(), Box::new(resolve));
     tokenizer.tokenize_state.stop = &MARKERS;
-    before(tokenizer)
+    State::Retry(StateName::StringBefore)
 }
 
 /// Before string.
@@ -38,7 +38,7 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
             State::Next(StateName::StringBefore),
             State::Next(StateName::StringBeforeData),
         ),
-        _ => before_data(tokenizer),
+        _ => State::Retry(StateName::StringBeforeData),
     }
 }
diff --git a/src/content/text.rs b/src/content/text.rs
index eb5a231..ee70f33 100644
--- a/src/content/text.rs
+++ b/src/content/text.rs
@@ -39,7 +39,7 @@ const MARKERS: [u8; 9] = [
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     tokenizer.register_resolver("whitespace".to_string(), Box::new(resolve));
     tokenizer.tokenize_state.stop = &MARKERS;
-    before(tokenizer)
+    State::Retry(StateName::TextBefore)
 }
 
 /// Before text.
@@ -87,7 +87,7 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
             State::Next(StateName::TextBefore),
             State::Next(StateName::TextBeforeData),
         ),
-        _ => before_data(tokenizer),
+        _ => State::Retry(StateName::TextBeforeData),
     }
 }