Diffstat:
-rw-r--r--  src/content/document.rs  |  42
-rw-r--r--  src/content/flow.rs      |  97
-rw-r--r--  src/content/string.rs    |  28
-rw-r--r--  src/content/text.rs      |  85
4 files changed, 167 insertions, 85 deletions
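Every hunk below makes the same change: the closure-based helpers (`attempt` with an `|ok| State::Fn(if ok { .. } else { .. })` closure, `attempt_n`, `attempt_opt`, and `go`) are replaced by a single `attempt(state_name, ok, nok)` whose success and failure continuations are passed as plain `State` values. The mappings visible in the diff are mechanical: `attempt_opt(a, b)` becomes `attempt(a, State::Fn(b), State::Fn(b))`, and `go(a, b)` becomes `attempt(a, State::Fn(b), State::Nok)`. The sketch below is a minimal toy model of the new call shape, not the crate's real `Tokenizer`, `State`, or `StateName` definitions; it only illustrates why two stored `State` values are enough to replace the closure.

// Toy model of the new `attempt(name, ok, nok)` shape.
// All type definitions here are illustrative, not the crate's API.

#[derive(Clone, Copy, Debug, PartialEq)]
enum StateName {
    BomStart,
    DocumentLineStart,
}

#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum State {
    Ok,
    Nok,
    Fn(StateName),
}

#[derive(Default)]
struct Tokenizer {
    // Where to continue when the attempted construct succeeds or fails.
    attempt_ok: Option<State>,
    attempt_nok: Option<State>,
}

impl Tokenizer {
    // New form: both continuations are plain `State` values, so they can be
    // stored as data instead of capturing an `|ok| ...` closure.
    fn attempt(&mut self, name: StateName, ok: State, nok: State) -> State {
        self.attempt_ok = Some(ok);
        self.attempt_nok = Some(nok);
        State::Fn(name) // the real tokenizer would now run `name`
    }
}

fn main() {
    let mut tokenizer = Tokenizer::default();
    // Mirrors the rewritten `attempt_opt` call in document.rs: the
    // continuation is the same whether the BOM is found or not.
    let next = tokenizer.attempt(
        StateName::BomStart,
        State::Fn(StateName::DocumentLineStart),
        State::Fn(StateName::DocumentLineStart),
    );
    assert_eq!(next, State::Fn(StateName::BomStart));
    assert_eq!(tokenizer.attempt_ok, tokenizer.attempt_nok);
}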
diff --git a/src/content/document.rs b/src/content/document.rs
index 7a43d48..d02021a 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -106,7 +106,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
         tokenizer.parse_state,
     )));
     tokenizer.tokenize_state.document_child_state = Some(State::Fn(StateName::FlowStart));
-    tokenizer.attempt_opt(StateName::BomStart, StateName::DocumentLineStart)
+    tokenizer.attempt(
+        StateName::BomStart,
+        State::Fn(StateName::DocumentLineStart),
+        State::Fn(StateName::DocumentLineStart),
+    )
 }
 
 /// Start of a line.
@@ -146,13 +150,11 @@ pub fn container_existing_before(tokenizer: &mut Tokenizer) -> State {
         };
 
         tokenizer.container = Some(container);
-        tokenizer.attempt(state_name, |ok| {
-            State::Fn(if ok {
-                StateName::DocumentContainerExistingAfter
-            } else {
-                StateName::DocumentContainerExistingMissing
-            })
-        })
+        tokenizer.attempt(
+            state_name,
+            State::Fn(StateName::DocumentContainerExistingAfter),
+            State::Fn(StateName::DocumentContainerExistingMissing),
+        )
     }
     // Otherwise, check new containers.
     else {
@@ -235,13 +237,11 @@ pub fn container_new_before(tokenizer: &mut Tokenizer) -> State {
         size: 0,
     });
 
-    tokenizer.attempt(StateName::BlockQuoteStart, |ok| {
-        State::Fn(if ok {
-            StateName::DocumentContainerNewAfter
-        } else {
-            StateName::DocumentContainerNewBeforeNotBlockQuote
-        })
-    })
+    tokenizer.attempt(
+        StateName::BlockQuoteStart,
+        State::Fn(StateName::DocumentContainerNewAfter),
+        State::Fn(StateName::DocumentContainerNewBeforeNotBlockQuote),
+    )
 }
 
 /// To do.
@@ -253,13 +253,11 @@ pub fn container_new_before_not_block_quote(tokenizer: &mut Tokenizer) -> State
         size: 0,
     });
 
-    tokenizer.attempt(StateName::ListStart, |ok| {
-        State::Fn(if ok {
-            StateName::DocumentContainerNewAfter
-        } else {
-            StateName::DocumentContainersAfter
-        })
-    })
+    tokenizer.attempt(
+        StateName::ListStart,
+        State::Fn(StateName::DocumentContainerNewAfter),
+        State::Fn(StateName::DocumentContainersAfter),
+    )
 }
 
 /// After a new container.
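document.rs above only had single attempts to rewrite. flow.rs, next, also has to replace `attempt_n`, which tried a whole list of constructs through one closure; the rewrite unrolls that list into a chain of `before_*` states in which each state attempts one construct and uses the next state as its nok continuation, ending at the paragraph fallback. The following sketch models that fallthrough chain; the `Construct` enum and the `try_construct` markers are invented for illustration and are not the crate's API.

// Toy model of the unrolled `attempt_n` chain in flow.rs below.

#[derive(Clone, Copy, Debug, PartialEq)]
enum Construct {
    CodeIndented,
    CodeFenced,
    HtmlFlow,
    HeadingAtx,
    HeadingSetext,
    ThematicBreak,
    Definition,
    Paragraph,
}

// Pretend parser for one construct: succeeds only if the line starts with a
// marker this toy model associates with it (markers are illustrative).
fn try_construct(construct: Construct, line: &str) -> bool {
    match construct {
        Construct::CodeFenced => line.starts_with("```"),
        Construct::HeadingAtx => line.starts_with('#'),
        Construct::ThematicBreak => line.starts_with("***"),
        Construct::Paragraph => true, // paragraph always matches
        _ => false,
    }
}

// The chain encoded by FlowBefore -> FlowBeforeCodeFenced -> ... ->
// FlowBeforeParagraph: attempt one construct; on failure fall through to the
// next, in the same order as the old `attempt_n` vector.
fn flow_before(line: &str) -> Construct {
    const CHAIN: [Construct; 8] = [
        Construct::CodeIndented,
        Construct::CodeFenced,
        Construct::HtmlFlow,
        Construct::HeadingAtx,
        Construct::HeadingSetext,
        Construct::ThematicBreak,
        Construct::Definition,
        Construct::Paragraph,
    ];
    *CHAIN
        .iter()
        .find(|&&construct| try_construct(construct, line))
        .expect("paragraph always matches")
}

fn main() {
    assert_eq!(flow_before("# heading"), Construct::HeadingAtx);
    assert_eq!(flow_before("plain text"), Construct::Paragraph);
}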
diff --git a/src/content/flow.rs b/src/content/flow.rs
index 6f62901..bfaf5e9 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -35,13 +35,11 @@ use crate::tokenizer::{State, StateName, Tokenizer};
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None => State::Ok,
-        _ => tokenizer.attempt(StateName::BlankLineStart, |ok| {
-            State::Fn(if ok {
-                StateName::FlowBlankLineAfter
-            } else {
-                StateName::FlowBefore
-            })
-        }),
+        _ => tokenizer.attempt(
+            StateName::BlankLineStart,
+            State::Fn(StateName::FlowBlankLineAfter),
+            State::Fn(StateName::FlowBefore),
+        ),
     }
 }
 
@@ -58,27 +56,64 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 /// |<div>
 /// ```
 pub fn before(tokenizer: &mut Tokenizer) -> State {
-    match tokenizer.current {
-        None => State::Ok,
-        _ => tokenizer.attempt_n(
-            vec![
-                StateName::CodeIndentedStart,
-                StateName::CodeFencedStart,
-                StateName::HtmlFlowStart,
-                StateName::HeadingAtxStart,
-                StateName::HeadingSetextStart,
-                StateName::ThematicBreakStart,
-                StateName::DefinitionStart,
-            ],
-            |ok| {
-                State::Fn(if ok {
-                    StateName::FlowAfter
-                } else {
-                    StateName::FlowBeforeParagraph
-                })
-            },
-        ),
-    }
+    // match tokenizer.current {
+    //     None => State::Ok,
+    //     _ => {
+    tokenizer.attempt(
+        StateName::CodeIndentedStart,
+        State::Fn(StateName::FlowAfter),
+        State::Fn(StateName::FlowBeforeCodeFenced),
+    )
+    //     }
+    // }
+}
+
+pub fn before_code_fenced(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(
+        StateName::CodeFencedStart,
+        State::Fn(StateName::FlowAfter),
+        State::Fn(StateName::FlowBeforeHtml),
+    )
+}
+
+pub fn before_html(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(
+        StateName::HtmlFlowStart,
+        State::Fn(StateName::FlowAfter),
+        State::Fn(StateName::FlowBeforeHeadingAtx),
+    )
+}
+
+pub fn before_heading_atx(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(
+        StateName::HeadingAtxStart,
+        State::Fn(StateName::FlowAfter),
+        State::Fn(StateName::FlowBeforeHeadingSetext),
+    )
+}
+
+pub fn before_heading_setext(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(
+        StateName::HeadingSetextStart,
+        State::Fn(StateName::FlowAfter),
+        State::Fn(StateName::FlowBeforeThematicBreak),
+    )
+}
+
+pub fn before_thematic_break(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(
+        StateName::ThematicBreakStart,
+        State::Fn(StateName::FlowAfter),
+        State::Fn(StateName::FlowBeforeDefinition),
+    )
+}
+
+pub fn before_definition(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(
+        StateName::DefinitionStart,
+        State::Fn(StateName::FlowAfter),
+        State::Fn(StateName::FlowBeforeParagraph),
+    )
 }
 
 /// After a blank line.
@@ -131,5 +166,9 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
 /// |asd
 /// ```
 pub fn before_paragraph(tokenizer: &mut Tokenizer) -> State {
-    tokenizer.go(StateName::ParagraphStart, StateName::FlowAfter)
+    tokenizer.attempt(
+        StateName::ParagraphStart,
+        State::Fn(StateName::FlowAfter),
+        State::Nok,
+    )
 }
diff --git a/src/content/string.rs b/src/content/string.rs
index 697ec2c..b3df6ec 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -28,25 +28,27 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 pub fn before(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None => State::Ok,
-        _ => tokenizer.attempt_n(
-            vec![
-                StateName::CharacterReferenceStart,
-                StateName::CharacterEscapeStart,
-            ],
-            |ok| {
-                State::Fn(if ok {
-                    StateName::StringBefore
-                } else {
-                    StateName::StringBeforeData
-                })
-            },
+        Some(b'&') => tokenizer.attempt(
+            StateName::CharacterReferenceStart,
+            State::Fn(StateName::StringBefore),
+            State::Fn(StateName::StringBeforeData),
         ),
+        Some(b'\\') => tokenizer.attempt(
+            StateName::CharacterEscapeStart,
+            State::Fn(StateName::StringBefore),
+            State::Fn(StateName::StringBeforeData),
+        ),
+        _ => before_data(tokenizer),
     }
 }
 
 /// At data.
 pub fn before_data(tokenizer: &mut Tokenizer) -> State {
-    tokenizer.go(StateName::DataStart, StateName::StringBefore)
+    tokenizer.attempt(
+        StateName::DataStart,
+        State::Fn(StateName::StringBefore),
+        State::Nok,
+    )
 }
 
 /// Resolve whitespace.
diff --git a/src/content/text.rs b/src/content/text.rs
index d8a2726..ff8c9eb 100644
--- a/src/content/text.rs
+++ b/src/content/text.rs
@@ -46,37 +46,80 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 pub fn before(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None => State::Ok,
-        _ => tokenizer.attempt_n(
-            vec![
-                StateName::AttentionStart,
-                StateName::AutolinkStart,
-                StateName::CharacterEscapeStart,
-                StateName::CharacterReferenceStart,
-                StateName::CodeTextStart,
-                StateName::HardBreakEscapeStart,
-                StateName::HtmlTextStart,
-                StateName::LabelEndStart,
-                StateName::LabelStartImageStart,
-                StateName::LabelStartLinkStart,
-            ],
-            |ok| {
-                State::Fn(if ok {
-                    StateName::TextBefore
-                } else {
-                    StateName::TextBeforeData
-                })
-            },
+        Some(b'!') => tokenizer.attempt(
+            StateName::LabelStartImageStart,
+            State::Fn(StateName::TextBefore),
+            State::Fn(StateName::TextBeforeData),
         ),
+        Some(b'&') => tokenizer.attempt(
+            StateName::CharacterReferenceStart,
+            State::Fn(StateName::TextBefore),
+            State::Fn(StateName::TextBeforeData),
+        ),
+        Some(b'*' | b'_') => tokenizer.attempt(
+            StateName::AttentionStart,
+            State::Fn(StateName::TextBefore),
+            State::Fn(StateName::TextBeforeData),
+        ),
+        // `autolink`, `html_text` (order does not matter)
+        Some(b'<') => tokenizer.attempt(
+            StateName::AutolinkStart,
+            State::Fn(StateName::TextBefore),
+            State::Fn(StateName::TextBeforeHtml),
+        ),
+        Some(b'[') => tokenizer.attempt(
+            StateName::LabelStartLinkStart,
+            State::Fn(StateName::TextBefore),
+            State::Fn(StateName::TextBeforeData),
+        ),
+        Some(b'\\') => tokenizer.attempt(
+            StateName::CharacterEscapeStart,
+            State::Fn(StateName::TextBefore),
+            State::Fn(StateName::TextBeforeHardBreakEscape),
+        ),
+        Some(b']') => tokenizer.attempt(
+            StateName::LabelEndStart,
+            State::Fn(StateName::TextBefore),
+            State::Fn(StateName::TextBeforeData),
+        ),
+        Some(b'`') => tokenizer.attempt(
+            StateName::CodeTextStart,
+            State::Fn(StateName::TextBefore),
+            State::Fn(StateName::TextBeforeData),
+        ),
+        _ => before_data(tokenizer),
     }
 }
 
+/// To do.
+pub fn before_html(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(
+        StateName::HtmlTextStart,
+        State::Fn(StateName::TextBefore),
+        State::Fn(StateName::TextBeforeData),
+    )
+}
+
+/// To do.
+pub fn before_hard_break_escape(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(
+        StateName::HardBreakEscapeStart,
+        State::Fn(StateName::TextBefore),
+        State::Fn(StateName::TextBeforeData),
+    )
+}
+
 /// At data.
 ///
 /// ```markdown
 /// |qwe
 /// ```
 pub fn before_data(tokenizer: &mut Tokenizer) -> State {
-    tokenizer.go(StateName::DataStart, StateName::TextBefore)
+    tokenizer.attempt(
+        StateName::DataStart,
+        State::Fn(StateName::TextBefore),
+        State::Nok,
+    )
 }
 
 /// Resolve whitespace.
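string.rs and text.rs above go a step further than flow.rs: `before` now switches on `tokenizer.current`, so a construct is only attempted when the byte at hand can open it (`&` for character references, `\` for escapes, a backtick for code text, and so on), and any other byte goes straight to data. Where one byte can open two constructs, the nok continuation chains them: `<` tries an autolink and falls back to HTML (text), and `\` tries a character escape and falls back to a hard break escape. The former `go(DataStart, ..)` calls become `attempt(DataStart, .., State::Nok)`, since data is not expected to fail. The sketch below models only the first-byte dispatch for string content, with toy types; in the real code a failed `&` or `\` attempt still falls back to data.

// Toy model of the first-byte dispatch that replaces `attempt_n` in
// string.rs: the current byte decides which construct is attempted first.

#[derive(Debug, PartialEq)]
enum StringConstruct {
    CharacterReference,
    CharacterEscape,
    Data,
}

// Equivalent of the new `string::before`: only `&` and `\` can open a
// non-data construct; everything else is data, and end of input means done.
fn string_before(current: Option<u8>) -> Option<StringConstruct> {
    match current {
        None => None, // end of input: `State::Ok` in the real code
        Some(b'&') => Some(StringConstruct::CharacterReference),
        Some(b'\\') => Some(StringConstruct::CharacterEscape),
        Some(_) => Some(StringConstruct::Data),
    }
}

fn main() {
    assert_eq!(
        string_before(Some(b'&')),
        Some(StringConstruct::CharacterReference)
    );
    assert_eq!(string_before(Some(b'x')), Some(StringConstruct::Data));
    assert_eq!(string_before(None), None);
}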