author    Titus Wormer <tituswormer@gmail.com>  2022-08-22 17:32:40 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-08-22 17:32:40 +0200
commit    49b6a4e72516e8b2a8768e761a60a4f461802d69 (patch)
tree      1baeb4eba94d9d49ccfd8bd15d0fb3fefd45a993 /src/construct/document.rs
parent    8774b207b7251730eaa7fbfe4f144122a472dda0 (diff)
Fix lazy paragraph after definition
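
An input of roughly this shape exercises the path touched below (a constructed illustration; the commit's own test cases are not shown on this page): a definition opened inside a container, followed by a lazy line.

```markdown
> [a]: b
"c"
```

The new check treats an open definition, like an open paragraph, as a construct that such a lazy line may continue.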
Diffstat (limited to 'src/construct/document.rs')
-rw-r--r--  src/construct/document.rs | 68
1 file changed, 59 insertions, 9 deletions
diff --git a/src/construct/document.rs b/src/construct/document.rs
index 4ef6acc..b438808 100644
--- a/src/construct/document.rs
+++ b/src/construct/document.rs
@@ -337,16 +337,65 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
state,
);
- let paragraph = matches!(state, State::Next(StateName::ParagraphInside))
- || (!child.events.is_empty()
- && child.events
- [skip::opt_back(&child.events, child.events.len() - 1, &[Name::LineEnding])]
- .name
- == Name::Paragraph);
-
tokenizer.tokenize_state.document_child_state = Some(state);
- if child.lazy && paragraph && tokenizer.tokenize_state.document_paragraph_before {
+ // If we’re on a lazy line, the previous line (lazy or not) is something
+ // that accepts lazy continuation, and this line can continue it, allow it.
+ //
+ // Accept:
+ //
+ // ```markdown
+ //   | * a
+ // > | b
+ //     ^
+ //   | ```
+ // ```
+ //
+ // Do not accept:
+ //
+ // ```markdown
+ //   | * # a
+ // > | b
+ //     ^
+ //   | ```
+ // ```
+ //
+ // Do not accept:
+ //
+ // ```markdown
+ //   | * a
+ // > | # b
+ //     ^
+ //   | ```
+ // ```
+ let mut document_lazy_continuation_current = false;
+ let mut stack_index = child.stack.len();
+
+ // Use two algorithms: (a) one for when we’re suspended or in multiline
+ // constructs such as definitions, and (b) another for when the line ending
+ // was already fed and the construct from (a) was closed.
+ while !document_lazy_continuation_current && stack_index > 0 {
+ stack_index -= 1;
+ let name = &child.stack[stack_index];
+ if name == &Name::Paragraph || name == &Name::Definition {
+ document_lazy_continuation_current = true;
+ }
+ }
+
+ // …and (b): because we parse each “rest” line as a paragraph and have
+ // already passed an EOL, check the closed events instead.
+ if !document_lazy_continuation_current && !child.events.is_empty() {
+ let before = skip::opt_back(&child.events, child.events.len() - 1, &[Name::LineEnding]);
+ let name = &child.events[before].name;
+ if name == &Name::Paragraph {
+ document_lazy_continuation_current = true;
+ }
+ }
+
+ if child.lazy
+ && tokenizer.tokenize_state.document_lazy_accepting_before
+ && document_lazy_continuation_current
+ {
tokenizer.tokenize_state.document_continued =
tokenizer.tokenize_state.document_container_stack.len();
}
@@ -366,7 +415,8 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
}
Some(_) => {
tokenizer.tokenize_state.document_continued = 0;
- tokenizer.tokenize_state.document_paragraph_before = paragraph;
+ tokenizer.tokenize_state.document_lazy_accepting_before =
+ document_lazy_continuation_current;
// Containers would only be interrupting if we’ve continued.
tokenizer.interrupt = false;
State::Retry(StateName::DocumentContainerExistingBefore)
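
As an aside for readers skimming the hunk, the two checks above can be modeled in isolation. The `Name` enum and the `lazy_continuation_current` helper in this sketch are hypothetical simplifications for illustration, not the crate's actual types or API:

```rust
// Hypothetical, simplified model of the two checks in `flow_end` above;
// the real code works on the tokenizer's event and stack types instead.

#[derive(PartialEq)]
enum Name {
    Paragraph,
    Definition,
    LineEnding,
    Other,
}

/// (a) Is a paragraph or definition still open on the stack?
/// (b) Otherwise, skipping trailing line endings, was the last closed
///     event a paragraph?
fn lazy_continuation_current(stack: &[Name], events: &[Name]) -> bool {
    // (a) Walk the open construct stack from the top.
    if stack
        .iter()
        .rev()
        .any(|name| matches!(name, Name::Paragraph | Name::Definition))
    {
        return true;
    }

    // (b) Look at the most recent event that is not a line ending.
    events
        .iter()
        .rev()
        .find(|name| **name != Name::LineEnding)
        .map_or(false, |name| *name == Name::Paragraph)
}

fn main() {
    // A definition is still open: a lazy line may continue it.
    assert!(lazy_continuation_current(&[Name::Definition], &[]));
    // The last closed event (before line endings) was a paragraph.
    assert!(lazy_continuation_current(
        &[],
        &[Name::Paragraph, Name::LineEnding]
    ));
    // A heading-like construct cannot be lazily continued.
    assert!(!lazy_continuation_current(&[Name::Other], &[Name::Other]));
}
```

The order matters: the open-construct stack is consulted first, so multiline constructs such as definitions count even while still open, and the closed events are only inspected as a fallback once a line ending has already been fed.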