path: root/src/construct/document.rs
author     Titus Wormer <tituswormer@gmail.com>  2022-08-31 16:50:20 +0200
committer  Titus Wormer <tituswormer@gmail.com>  2022-08-31 16:50:20 +0200
commit     b1590a4fb0c28fdb6af866ea79c186ea57284493 (patch)
tree       61264dc36135e7dae34a04992a99b9f3f71e7b8e /src/construct/document.rs
parent     670f1d82e01ea2394b21d7d1857f41bdc67b3fce (diff)
Add support for GFM tables
Diffstat (limited to 'src/construct/document.rs')
-rw-r--r--  src/construct/document.rs  16
1 file changed, 13 insertions(+), 3 deletions(-)
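As a rough usage sketch, not part of this commit: once the GFM constructs are enabled through the crate's options, a pipe table parses into an HTML table. The names below follow the crate's later public API (markdown::to_html_with_options and markdown::Options::gfm()) and are an assumption here; the API around the time of this commit may have differed.

fn main() {
    // A small GFM pipe table: header row, delimiter row, one body row.
    let input = "| a | b |\n| - | - |\n| 1 | 2 |";
    // Assumed API names (`to_html_with_options`, `Options::gfm()`); see note above.
    let html = markdown::to_html_with_options(input, &markdown::Options::gfm()).unwrap();
    println!("{}", html); // expected to contain a <table> element
}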
diff --git a/src/construct/document.rs b/src/construct/document.rs
index 9c76e46..e31e58d 100644
--- a/src/construct/document.rs
+++ b/src/construct/document.rs
@@ -269,6 +269,14 @@ pub fn container_new_after(tokenizer: &mut Tokenizer) -> State {
         exit_containers(tokenizer, &Phase::Prefix);
     }
 
+    // We are “piercing” into the flow with a new container.
+    tokenizer
+        .tokenize_state
+        .document_child
+        .as_mut()
+        .unwrap()
+        .pierce = true;
+
     tokenizer
         .tokenize_state
         .document_container_stack
@@ -398,12 +406,11 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
     let mut stack_index = child.stack.len();
 
     // Use two algo’s: one for when we’re suspended or in multiline things
-    // like definitions, another (b) for when we fed the line ending and closed
-    // a)
+    // like definitions, another for when we fed the line ending and closed.
     while !document_lazy_continuation_current && stack_index > 0 {
         stack_index -= 1;
         let name = &child.stack[stack_index];
-        if name == &Name::Paragraph || name == &Name::Definition {
+        if name == &Name::Paragraph || name == &Name::Definition || name == &Name::GfmTableHead {
             document_lazy_continuation_current = true;
         }
     }
@@ -418,6 +425,9 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
         }
     }
 
+    // Reset “piercing”.
+    child.pierce = false;
+
     if child.lazy
         && tokenizer.tokenize_state.document_lazy_accepting_before
         && document_lazy_continuation_current
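For context on the hunks above: the loop in flow_end walks the child tokenizer's open construct stack to decide whether a lazy line (one missing its container prefix, such as the > of a block quote) may continue whatever is currently open; this commit adds the GFM table head to the constructs that accept such continuation. The pierce flag set in the first hunk records that a new container opened (“pierced”) into the flow, and the third hunk resets it at the end of each flow line. A simplified, self-contained sketch of the stack check, using illustrative types rather than the crate's actual ones:

// Illustrative sketch only: `Name` here is a stand-in, not the crate's enum.
#[derive(PartialEq)]
enum Name {
    Paragraph,
    Definition,
    GfmTableHead,
    BlockQuote,
}

// Mirrors the shape of the changed loop in `flow_end`: scan the open construct
// stack and report whether the current line may be a lazy continuation.
fn allows_lazy_continuation(stack: &[Name]) -> bool {
    let mut stack_index = stack.len();
    while stack_index > 0 {
        stack_index -= 1;
        let name = &stack[stack_index];
        if name == &Name::Paragraph || name == &Name::Definition || name == &Name::GfmTableHead {
            return true;
        }
    }
    false
}

fn main() {
    // With this commit, an open table head accepts lazy continuation, just as
    // paragraphs and definitions already did.
    assert!(allows_lazy_continuation(&[Name::BlockQuote, Name::GfmTableHead]));
    assert!(!allows_lazy_continuation(&[Name::BlockQuote]));
}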