aboutsummaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
authorLibravatar Titus Wormer <tituswormer@gmail.com>2022-07-11 14:53:45 +0200
committerLibravatar Titus Wormer <tituswormer@gmail.com>2022-07-11 15:04:32 +0200
commitb7bd2b734fae09c40d738fcd57d5ee6876f0f504 (patch)
treeea3b7dc9b6e668823323187dbe2f2030d6965905 /src
parent8e78fc8f209d20e8f9a76321fcbebf77fa4c57fc (diff)
downloadmarkdown-rs-b7bd2b734fae09c40d738fcd57d5ee6876f0f504.tar.gz
markdown-rs-b7bd2b734fae09c40d738fcd57d5ee6876f0f504.tar.bz2
markdown-rs-b7bd2b734fae09c40d738fcd57d5ee6876f0f504.zip
Fix block quote bugs
Diffstat (limited to 'src')
-rw-r--r--src/compiler.rs15
-rw-r--r--src/construct/heading_setext.rs5
-rw-r--r--src/content/document.rs8
-rw-r--r--src/tokenizer.rs11
4 files changed, 21 insertions, 18 deletions
diff --git a/src/compiler.rs b/src/compiler.rs
index d675c48..8a28654 100644
--- a/src/compiler.rs
+++ b/src/compiler.rs
@@ -237,6 +237,7 @@ struct CompileContext<'a> {
pub character_reference_kind: Option<CharacterReferenceKind>,
pub media_stack: Vec<Media>,
pub definitions: HashMap<String, Definition>,
+ pub tight_stack: Vec<bool>,
/// Fields used to influence the current compilation.
pub slurp_one_line_ending: bool,
pub tags: bool,
@@ -270,6 +271,7 @@ impl<'a> CompileContext<'a> {
character_reference_kind: None,
media_stack: vec![],
definitions: HashMap::new(),
+ tight_stack: vec![],
slurp_one_line_ending: false,
tags: true,
ignore_encode: false,
@@ -604,7 +606,7 @@ fn on_enter_buffer(context: &mut CompileContext) {
/// Handle [`Enter`][EventType::Enter]:[`BlockQuote`][Token::BlockQuote].
fn on_enter_block_quote(context: &mut CompileContext) {
- // tightStack.push(false)
+ context.tight_stack.push(false);
context.line_ending_if_needed();
context.tag("<blockquote>".to_string());
}
@@ -761,7 +763,7 @@ fn on_exit_break(context: &mut CompileContext) {
/// Handle [`Exit`][EventType::Exit]:[`BlockQuote`][Token::BlockQuote].
fn on_exit_block_quote(context: &mut CompileContext) {
- // tightStack.pop()
+ context.tight_stack.pop();
context.line_ending_if_needed();
context.tag("</blockquote>".to_string());
// let mut slurp_all_line_endings = false;
@@ -842,15 +844,16 @@ fn on_exit_code_flow(context: &mut CompileContext) {
.take()
.expect("`code_flow_seen_data` must be defined");
- // To do: containers.
// One special case is if we are inside a container, and the fenced code was
// not closed (meaning it runs to the end).
// In that case, the following line ending, is considered *outside* the
// fenced code and block quote by micromark, but CM wants to treat that
// ending as part of the code.
- // if fenced_count != None && fenced_count < 2 && tightStack.length > 0 && !last_was_tag {
- // line_ending();
- // }
+ if let Some(count) = context.code_fenced_fences_count {
+ if count == 1 && !context.tight_stack.is_empty() && !context.last_was_tag {
+ context.line_ending();
+ }
+ }
// But in most cases, it’s simpler: when we’ve seen some data, emit an extra
// line ending when needed.
diff --git a/src/construct/heading_setext.rs b/src/construct/heading_setext.rs
index 633f7de..2078338 100644
--- a/src/construct/heading_setext.rs
+++ b/src/construct/heading_setext.rs
@@ -130,11 +130,6 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
let paragraph_before =
previous > 1 && tokenizer.events[previous].token_type == Token::Paragraph;
- println!(
- "setext-start: {:?} {:?} {:?}",
- tokenizer.interrupt, tokenizer.lazy, paragraph_before
- );
-
// Require a paragraph before and do not allow on a lazy line.
if paragraph_before && !tokenizer.lazy {
// To do: allow arbitrary when code (indented) is turned off.
diff --git a/src/content/document.rs b/src/content/document.rs
index 0112d52..f093a04 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -158,6 +158,7 @@ fn document_continue(
let size = info.continued;
info = exit_containers(tokenizer, info, size, true);
+ tokenizer.expect(code, true);
// // Fix positions.
// let index = indexBeforeExits
@@ -241,6 +242,7 @@ fn there_is_a_new_container(
println!("there_is_a_new_container");
let size = info.continued;
info = exit_containers(tokenizer, info, size, true);
+ tokenizer.expect(code, true);
// Remove from the event stack.
// We’ll properly add exits at different points manually.
@@ -251,7 +253,7 @@ fn there_is_a_new_container(
unreachable!("todo: cont {:?}", name)
};
- println!("creating exit for `{:?}`", name);
+ println!("creating exit (a) for `{:?}`", name);
let token_types = end();
@@ -329,7 +331,7 @@ fn exit_containers(
unreachable!("todo: cont {:?}", name)
};
- println!("creating exit for `{:?}`", name);
+ println!("creating exit (b) for `{:?}`", name);
let token_types = end();
@@ -429,6 +431,7 @@ fn flow_start(tokenizer: &mut Tokenizer, code: Code, mut info: DocumentInfo) ->
// Exit containers.
let size = info.continued;
info = exit_containers(tokenizer, info, size, true);
+ tokenizer.expect(code, true);
// Define start.
let point = tokenizer.point.clone();
@@ -469,6 +472,7 @@ fn flow_end(
State::Ok => {
println!("State::Ok");
info = exit_containers(tokenizer, info, 0, false);
+ tokenizer.expect(code, true);
// println!("document:inject: {:?}", info.inject);
let mut map = EditMap::new();
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index c984a75..80786ea 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -247,8 +247,10 @@ impl<'a> Tokenizer<'a> {
}
/// Prepare for a next code to get consumed.
- fn expect(&mut self, code: Code) {
- assert!(self.consumed, "expected previous character to be consumed");
+ pub fn expect(&mut self, code: Code, force: bool) {
+ if !force {
+ assert!(self.consumed, "expected previous character to be consumed");
+ }
self.consumed = false;
self.current = code;
}
@@ -609,7 +611,6 @@ fn attempt_impl(
// Should it be before?
// How to match `eof`?
if !codes.is_empty() && pause(tokenizer.previous) {
- tokenizer.consumed = true;
println!("pause!: {:?}", (codes.clone(), vec![code]));
return done(
(codes, vec![code]),
@@ -674,7 +675,7 @@ fn feed_impl(
}
State::Fn(func) => {
log::debug!("main: passing `{:?}`", code);
- tokenizer.expect(code);
+ tokenizer.expect(code, false);
let (next, remainder) = check_statefn_result(func(tokenizer, code));
state = next;
index = index + 1
@@ -706,7 +707,7 @@ fn flush_impl(
State::Fn(func) => {
let code = Code::None;
log::debug!("main: passing eof");
- tokenizer.expect(code);
+ tokenizer.expect(code, false);
let (next, remainder) = check_statefn_result(func(tokenizer, code));
assert!(remainder.is_none(), "expected no remainder");
state = next;