author     Titus Wormer <tituswormer@gmail.com>  2022-08-12 14:21:53 +0200
committer  Titus Wormer <tituswormer@gmail.com>  2022-08-12 14:21:53 +0200
commit     504729a4a0c8f3e0d8fc9159e0273150b169e184 (patch)
tree       a6bf291322decccd6011580337b1feed6151b554 /src/content
parent     db5a491e6c2223d1db9b458307431a54db3c40f2 (diff)
Refactor to improve docs of each function
Diffstat (limited to 'src/content')
-rw-r--r--  src/content/document.rs |  20
-rw-r--r--  src/content/flow.rs     | 105
-rw-r--r--  src/content/string.rs   |  18
-rw-r--r--  src/content/text.rs     |  36
4 files changed, 129 insertions, 50 deletions
diff --git a/src/content/document.rs b/src/content/document.rs
index 04f9dc6..59e6e7c 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -46,7 +46,7 @@ enum Phase {
     Eof,
 }
 
-/// Turn `codes` as the document content type into events.
+/// Parse a document.
 pub fn document(parse_state: &mut ParseState, point: Point) -> Vec<Event> {
     let mut tokenizer = Tokenizer::new(point, parse_state);
 
@@ -66,9 +66,7 @@ pub fn document(parse_state: &mut ParseState, point: Point) -> Vec<Event> {
     events
 }
 
-/// At the beginning.
-///
-/// Perhaps a BOM?
+/// Start of document, at an optional BOM.
 ///
 /// ```markdown
 /// > | a
@@ -88,7 +86,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::BomStart)
 }
 
-/// Before existing containers.
+/// At optional existing containers.
 //
 /// ```markdown
 /// | * a
@@ -121,7 +119,7 @@ pub fn container_existing_before(tokenizer: &mut Tokenizer) -> State {
     }
 }
 
-/// After an existing container.
+/// After continued existing container.
 //
 /// ```markdown
 /// | * a
@@ -133,7 +131,7 @@ pub fn container_existing_after(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::DocumentContainerExistingBefore)
 }
 
-/// Before a new container.
+/// At new containers.
 //
 /// ```markdown
 /// > | * a
@@ -183,7 +181,7 @@ pub fn container_new_before(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::BlockQuoteStart)
 }
 
-/// Maybe before a new container, but not a block quote.
+/// At new container, but not a block quote.
 //
 /// ```markdown
 /// > | * a
@@ -206,7 +204,7 @@ pub fn container_new_before_not_block_quote(tokenizer: &mut Tokenizer) -> State
     State::Retry(StateName::ListStart)
 }
 
-/// Maybe before a new container, but not a list.
+/// At new container, but not a list (or block quote).
 //
 /// ```markdown
 /// > | a
@@ -224,7 +222,7 @@ pub fn container_new_before_not_list(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::DocumentContainersAfter)
 }
 
-/// After a new container.
+/// After new container.
 ///
 /// ```markdown
 /// > | * a
@@ -258,7 +256,7 @@ pub fn container_new_after(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::DocumentContainerNewBefore)
 }
 
-/// After containers, before flow.
+/// After containers, at flow.
 //
 /// ```markdown
 /// > | * a
diff --git a/src/content/flow.rs b/src/content/flow.rs
index c6bd398..08c7891 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -23,15 +23,15 @@ use crate::event::Name;
 use crate::state::{Name as StateName, State};
 use crate::tokenizer::Tokenizer;
 
-/// Before flow.
-///
-/// First we assume a blank line.
+/// Start of flow.
 //
 /// ```markdown
-/// |
-/// |## alpha
-/// | bravo
-/// |***
+/// > | ## alpha
+///     ^
+/// > | bravo
+///     ^
+/// > | ***
+///     ^
 /// ```
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
@@ -81,6 +81,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
         // Actual parsing: blank line? Indented code? Indented anything?
         // Also includes `-` which can be a setext heading underline or a thematic break.
         None | Some(b'\t' | b'\n' | b' ' | b'-') => State::Retry(StateName::FlowBlankLineBefore),
+        // Must be a paragraph.
         Some(_) => {
            tokenizer.attempt(State::Next(StateName::FlowAfter), State::Nok);
            State::Retry(StateName::ParagraphStart)
@@ -88,6 +89,12 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
     }
 }
 
+/// At blank line.
+///
+/// ```markdown
+/// > | ␠␠␊
+///     ^
+/// ```
 pub fn blank_line_before(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::FlowBlankLineAfter),
@@ -96,17 +103,11 @@ pub fn blank_line_before(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::BlankLineStart)
 }
 
-/// Before flow (initial).
-///
-/// “Initial” flow means unprefixed flow, so right at the start of a line.
-/// Interestingly, the only flow (initial) construct is indented code.
-/// Move to `before` afterwards.
+/// At code (indented).
 ///
 /// ```markdown
-/// |qwe
-/// | asd
-/// |~~~js
-/// |<div>
+/// > | ␠␠␠␠a
+///     ^
 /// ```
 pub fn before_code_indented(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
@@ -116,6 +117,12 @@ pub fn before_code_indented(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::CodeIndentedStart)
 }
 
+/// At code (fenced).
+///
+/// ````markdown
+/// > | ```
+///     ^
+/// ````
 pub fn before_code_fenced(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::FlowAfter),
@@ -124,6 +131,12 @@ pub fn before_code_fenced(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::CodeFencedStart)
 }
 
+/// At html (flow).
+///
+/// ```markdown
+/// > | <a>
+///     ^
+/// ```
 pub fn before_html(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::FlowAfter),
@@ -132,6 +145,12 @@ pub fn before_html(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::HtmlFlowStart)
 }
 
+/// At heading (atx).
+///
+/// ```markdown
+/// > | # a
+///     ^
+/// ```
 pub fn before_heading_atx(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::FlowAfter),
@@ -140,6 +159,13 @@ pub fn before_heading_atx(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::HeadingAtxStart)
 }
 
+/// At heading (setext).
+///
+/// ```markdown
+///   | a
+/// > | =
+///     ^
+/// ```
 pub fn before_heading_setext(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::FlowAfter),
@@ -148,6 +174,12 @@ pub fn before_heading_setext(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::HeadingSetextStart)
 }
 
+/// At thematic break.
+///
+/// ```markdown
+/// > | ***
+///     ^
+/// ```
 pub fn before_thematic_break(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::FlowAfter),
@@ -156,6 +188,12 @@ pub fn before_thematic_break(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::ThematicBreakStart)
 }
 
+/// At definition.
+///
+/// ```markdown
+/// > | [a]: b
+///     ^
+/// ```
 pub fn before_definition(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::FlowAfter),
@@ -164,12 +202,22 @@ pub fn before_definition(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::DefinitionStart)
 }
 
-/// After a blank line.
+/// At paragraph.
 ///
-/// Move to `start` afterwards.
+/// ```markdown
+/// > | a
+///     ^
+/// ```
+pub fn before_paragraph(tokenizer: &mut Tokenizer) -> State {
+    tokenizer.attempt(State::Next(StateName::FlowAfter), State::Nok);
+    State::Retry(StateName::ParagraphStart)
+}
+
+/// After blank line.
 ///
 /// ```markdown
-/// ␠␠|
+/// > | ␠␠␊
+///       ^
 /// ```
 pub fn blank_line_after(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
@@ -186,14 +234,11 @@ pub fn blank_line_after(tokenizer: &mut Tokenizer) -> State {
     }
 }
 
-/// After something.
+/// After flow.
 ///
 /// ```markdown
-/// ## alpha|
-/// |
-/// ~~~js
-/// asd
-/// ~~~|
+/// > | # a␊
+///        ^
 /// ```
 pub fn after(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
@@ -207,13 +252,3 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
         _ => unreachable!("expected eol/eof"),
     }
 }
-
-/// Before a paragraph.
-///
-/// ```markdown
-/// |asd
-/// ```
-pub fn before_paragraph(tokenizer: &mut Tokenizer) -> State {
-    tokenizer.attempt(State::Next(StateName::FlowAfter), State::Nok);
-    State::Retry(StateName::ParagraphStart)
-}
diff --git a/src/content/string.rs b/src/content/string.rs
index 1eefd30..ec4fce2 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -17,9 +17,15 @@ use crate::resolve::Name as ResolveName;
 use crate::state::{Name as StateName, State};
 use crate::tokenizer::Tokenizer;
 
+/// Characters that can start something in string.
 const MARKERS: [u8; 2] = [b'&', b'\\'];
 
 /// Start of string.
+///
+/// ````markdown
+/// > | ```js
+///        ^
+/// ````
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     tokenizer.register_resolver(ResolveName::String);
     tokenizer.tokenize_state.markers = &MARKERS;
@@ -27,6 +33,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Before string.
+///
+/// ````markdown
+/// > | ```js
+///        ^
+/// ````
 pub fn before(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None => State::Ok,
@@ -49,12 +60,17 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// At data.
+///
+/// ````markdown
+/// > | ```js
+///        ^
+/// ````
 pub fn before_data(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(State::Next(StateName::StringBefore), State::Nok);
     State::Retry(StateName::DataStart)
 }
 
-/// Resolve whitespace.
+/// Resolve whitespace in string.
 pub fn resolve(tokenizer: &mut Tokenizer) {
     resolve_whitespace(tokenizer, false, false);
 }
diff --git a/src/content/text.rs b/src/content/text.rs
index 6509d30..5c13dba 100644
--- a/src/content/text.rs
+++ b/src/content/text.rs
@@ -25,6 +25,7 @@ use crate::resolve::Name as ResolveName;
 use crate::state::{Name as StateName, State};
 use crate::tokenizer::Tokenizer;
 
+/// Characters that can start something in text.
 const MARKERS: [u8; 9] = [
     b'!', // `label_start_image`
     b'&', // `character_reference`
@@ -38,6 +39,11 @@ const MARKERS: [u8; 9] = [
 ];
 
 /// Start of text.
+///
+/// ```markdown
+/// > | abc
+///     ^
+/// ```
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     tokenizer.register_resolver(ResolveName::Text);
     tokenizer.tokenize_state.markers = &MARKERS;
@@ -45,6 +51,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Before text.
+///
+/// ```markdown
+/// > | abc
+///     ^
+/// ```
 pub fn before(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         None => State::Ok,
@@ -109,7 +120,14 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
     }
 }
 
-/// At `<`, which wasn’t an autolink: before HTML?
+/// Before html (text).
+///
+/// At `<`, which wasn’t an autolink.
+///
+/// ```markdown
+/// > | a <b>
+///       ^
+/// ```
 pub fn before_html(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::TextBefore),
@@ -118,7 +136,14 @@ pub fn before_html(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::HtmlTextStart)
 }
 
-/// At `\`, which wasn’t a character escape: before a hard break?
+/// Before hard break escape.
+///
+/// At `\`, which wasn’t a character escape.
+///
+/// ```markdown
+/// > | a \␊
+///       ^
+/// ```
 pub fn before_hard_break_escape(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(
         State::Next(StateName::TextBefore),
@@ -127,7 +152,12 @@ pub fn before_hard_break_escape(tokenizer: &mut Tokenizer) -> State {
     State::Retry(StateName::HardBreakEscapeStart)
 }
 
-/// At data.
+/// Before data.
+///
+/// ```markdown
+/// > | a
+///     ^
+/// ```
 pub fn before_data(tokenizer: &mut Tokenizer) -> State {
     tokenizer.attempt(State::Next(StateName::TextBefore), State::Nok);
     State::Retry(StateName::DataStart)
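For reference, the doc comments this patch introduces all follow one positional notation: `> |` marks the markdown line a state function applies to, `^` points at the byte the tokenizer is currently at, and `␠`/`␊` stand for a space and a line ending. A minimal sketch of the pattern on a state function of the same shape (the `example` name, the comments, and the ok/nok routing are illustrative, not taken from the patch):

use crate::state::{Name as StateName, State};
use crate::tokenizer::Tokenizer;

/// At thematic break.
///
/// The fenced example documents the input this state expects; the caret
/// marks where `tokenizer.current` is when the function runs.
///
/// ```markdown
/// > | ***
///     ^
/// ```
pub fn example(tokenizer: &mut Tokenizer) -> State {
    // Try the construct: on success continue after flow, otherwise give up.
    tokenizer.attempt(State::Next(StateName::FlowAfter), State::Nok);
    // Hand control to the construct's own start state.
    State::Retry(StateName::ThematicBreakStart)
}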