From 86834a02b301bba48c2bd568beb156e604470167 Mon Sep 17 00:00:00 2001
From: Titus Wormer
Date: Tue, 12 Jul 2022 19:04:31 +0200
Subject: Fix some issues around empty lists

---
 src/construct/list.rs | 69 ++++++++++++++++++++++++++++++++++++++++++---------
 1 file changed, 57 insertions(+), 12 deletions(-)

(limited to 'src/construct')

diff --git a/src/construct/list.rs b/src/construct/list.rs
index b81a5cc..27180a8 100644
--- a/src/construct/list.rs
+++ b/src/construct/list.rs
@@ -123,11 +123,6 @@ fn before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
     }
 }
 
-/// To do.
-fn nok(_tokenizer: &mut Tokenizer, _code: Code) -> StateFnResult {
-    (State::Nok, None)
-}
-
 /// To do.
 fn before_unordered(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
     // To do: check if this is a thematic break?
@@ -160,13 +155,42 @@ fn marker(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
     tokenizer.enter(Token::ListItemMarker);
     tokenizer.consume(code);
     tokenizer.exit(Token::ListItemMarker);
-    // To do: check blank line, if true `State::Nok` else `on_blank`.
+    println!("check:blank_line:before");
     (State::Fn(Box::new(marker_after)), None)
 }
 
 /// To do.
 fn marker_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
-    tokenizer.attempt(list_item_prefix_whitespace, |ok| {
+    let interrupt = tokenizer.interrupt;
+
+    tokenizer.check(blank_line, move |ok| {
+        println!("check:blank_line:after {:?} {:?}", ok, interrupt);
+        let func = if ok {
+            if interrupt {
+                nok
+            } else {
+                on_blank
+            }
+        } else {
+            marker_after_after
+        };
+        Box::new(func)
+    })(tokenizer, code)
+}
+
+/// To do.
+fn on_blank(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+    // self.containerState.initialBlankLine = true
+    // initialSize++
+    prefix_end(tokenizer, code)
+}
+
+/// To do.
+fn marker_after_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+    println!("marker:after:before");
+    let interrupt = tokenizer.interrupt;
+    tokenizer.attempt(list_item_prefix_whitespace, move |ok| {
+        println!("marker:after:after: {:?} {:?}", ok, interrupt);
         let func = if ok { prefix_end } else { prefix_other };
         Box::new(func)
     })(tokenizer, code)
@@ -209,16 +233,22 @@ fn list_item_prefix_whitespace_after(_tokenizer: &mut Tokenizer, code: Code) ->
     (State::Ok, Some(vec![code]))
 }
 
+/// To do.
+fn nok(_tokenizer: &mut Tokenizer, _code: Code) -> StateFnResult {
+    (State::Nok, None)
+}
+
 /// To do.
 pub fn cont(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+    println!("cont:check:blank:before");
     tokenizer.check(blank_line, |ok| {
+        println!("cont:check:blank:after: {:?}", ok);
         let func = if ok { blank_cont } else { not_blank_cont };
         Box::new(func)
     })(tokenizer, code)
 }
 
 pub fn blank_cont(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
-    println!("cont: blank");
     // self.containerState.furtherBlankLines =
     // self.containerState.furtherBlankLines ||
     // self.containerState.initialBlankLine
@@ -235,10 +265,25 @@ pub fn blank_cont_after(_tokenizer: &mut Tokenizer, code: Code) -> StateFnResult
 }
 
 pub fn not_blank_cont(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
-    println!("cont: not blank");
-    // if (self.containerState.furtherBlankLines || !markdownSpace(code)) nok
-    // To do: eat exactly `size` whitespace.
-    tokenizer.go(space_or_tab_min_max(TAB_SIZE, TAB_SIZE), blank_cont_after)(tokenizer, code)
+    let index = tokenizer.events.len();
+    let currently_blank =
+        index > 0 && tokenizer.events[index - 1].token_type == Token::BlankLineEnding;
+    let mut further_blank = false;
+
+    if currently_blank {
+        let before = skip::opt_back(&tokenizer.events, index - 3, &[Token::SpaceOrTab]);
+        further_blank = tokenizer.events[before].token_type == Token::BlankLineEnding;
+    }
+
+    if further_blank || !matches!(code, Code::VirtualSpace | Code::Char('\t' | ' ')) {
+        println!("cont: not blank after further blank, or not blank w/o whitespace");
+        println!("cont:nok:1");
+        (State::Nok, None)
+    } else {
+        println!("cont: not blank");
+        // To do: eat exactly `size` whitespace.
+        tokenizer.go(space_or_tab_min_max(TAB_SIZE, TAB_SIZE), blank_cont_after)(tokenizer, code)
+    }
 }
 
 /// To do.
-- 
cgit