Diffstat
-rw-r--r--  src/content/content.rs  22
-rw-r--r--  src/content/flow.rs     19
2 files changed, 27 insertions, 14 deletions
diff --git a/src/content/content.rs b/src/content/content.rs
index 4660fbe..4ca69ee 100644
--- a/src/content/content.rs
+++ b/src/content/content.rs
@@ -27,7 +27,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
unreachable!("expected non-eol/eof");
}
- _ => paragraph_initial(tokenizer, code)
+ _ => after_definitions(tokenizer, code)
// To do: definition.
// _ => tokenizer.attempt(definition, |ok| {
// Box::new(if ok {
@@ -44,10 +44,26 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// |asd
/// ```
+fn after_definitions(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+ match code {
+ Code::None => (State::Ok, None),
+ Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
+ unreachable!("to do: handle eol after definition");
+ }
+ _ => paragraph_initial(tokenizer, code),
+ }
+}
+
+/// Before a paragraph.
+///
+/// ```markdown
+/// |asd
+/// ```
fn paragraph_initial(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
- Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
- unreachable!("expected non-eol/eof");
+ Code::None => (State::Ok, None),
+ Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
+ unreachable!("to do: handle eol after definition");
}
_ => {
tokenizer.enter(TokenType::Paragraph);
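The content.rs change above splits the start of flow content into two states: `after_definitions`, which accepts the end of input and otherwise hands off to `paragraph_initial`, which opens the paragraph token. As a rough illustration of that state-function pattern, here is a minimal, self-contained sketch; `Code`, `State`, `Tokenizer`, and `StateFnResult` below are simplified stand-ins rather than the crate's real types, and only the control flow mirrors the diff.

```rust
// Minimal sketch of the state-function pattern the change above extends.
// All types here are simplified stand-ins, not this crate's real API.

#[derive(Clone, Copy, Debug)]
enum Code {
    None,
    CarriageReturnLineFeed,
    Char(char),
}

#[derive(Debug)]
enum State {
    Ok,
}

#[derive(Default)]
struct Tokenizer {
    events: Vec<String>,
}

impl Tokenizer {
    fn enter(&mut self, token: &str) {
        self.events.push(format!("enter:{token}"));
    }
}

type StateFnResult = (State, Option<Code>);

/// After definitions: end of input is fine, anything else starts a paragraph.
fn after_definitions(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::None => (State::Ok, None),
        Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
            unreachable!("to do: handle eol after definition");
        }
        _ => paragraph_initial(tokenizer, code),
    }
}

/// Before a paragraph: open a paragraph token and, in this sketch, stop.
fn paragraph_initial(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::None => (State::Ok, None),
        Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
            unreachable!("to do: handle eol after definition");
        }
        _ => {
            tokenizer.enter("Paragraph");
            (State::Ok, Some(code))
        }
    }
}

fn main() {
    let mut tokenizer = Tokenizer::default();
    let result = after_definitions(&mut tokenizer, Code::Char('a'));
    println!("{result:?}, events: {:?}", tokenizer.events);
}
```

Returning the unconsumed `code` back to the caller is just how this sketch signals that the character still needs handling; the real tokenizer has its own conventions for that, which the sketch does not try to reproduce.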
diff --git a/src/content/flow.rs b/src/content/flow.rs
index 4d2ece1..d7509d7 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -14,17 +14,18 @@
//! * [Code (fenced)][crate::construct::code_fenced]
//! * [Code (indented)][crate::construct::code_indented]
//! * [Heading (atx)][crate::construct::heading_atx]
+//! * [Heading (setext)][crate::construct::heading_setext]
//! * [HTML (flow)][crate::construct::html_flow]
//! * [Thematic break][crate::construct::thematic_break]
//!
-//! <!-- To do: `setext` in content? Link to content. -->
+//! <!-- To do: Link to content. -->

use crate::constant::TAB_SIZE;
use crate::construct::{
blank_line::start as blank_line, code_fenced::start as code_fenced,
code_indented::start as code_indented, heading_atx::start as heading_atx,
- html_flow::start as html_flow, partial_whitespace::start as whitespace,
- thematic_break::start as thematic_break,
+ heading_setext::start as heading_setext, html_flow::start as html_flow,
+ partial_whitespace::start as whitespace, thematic_break::start as thematic_break,
};
use crate::subtokenize::subtokenize;
use crate::tokenizer::{Code, Event, Point, State, StateFnResult, TokenType, Tokenizer};
@@ -144,24 +145,20 @@ pub fn before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// |***
/// ```
pub fn before_after_prefix(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
- tokenizer.attempt_2(heading_atx, thematic_break, |ok| {
+ tokenizer.attempt_3(heading_atx, thematic_break, heading_setext, |ok| {
Box::new(if ok { after } else { content_before })
})(tokenizer, code)
}

-/// Before flow, but not before a heading (atx) or thematic break.
-///
-/// At this point, we’re at content (zero or more definitions and zero or one
-/// paragraph/setext heading).
+/// Before content.
///
/// ```markdown
/// |qwe
/// ```
-// To do: currently only parses a single line.
+///
// To do:
// - Multiline
// - One or more definitions.
-// - Setext heading.
fn content_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
@@ -174,12 +171,12 @@ fn content_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
}
}
}
+
/// In content.
///
/// ```markdown
/// al|pha
/// ```
-// To do: lift limitations as documented above.
fn content(tokenizer: &mut Tokenizer, code: Code, previous: usize) -> StateFnResult {
match code {
Code::None => content_end(tokenizer, code),
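In flow.rs, `before_after_prefix` now attempts heading (atx), thematic break, and heading (setext) in turn, falling back to content only when all three fail (`Box::new(if ok { after } else { content_before })`). Below is a minimal sketch of that "try constructs in order, else fall back" idea; it uses plain functions over a single line of text instead of the crate's tokenizer state functions and omits the state handling the real `attempt_3` performs, so treat it as an illustration of the dispatch shape only.

```rust
/// A construct start function: `Some(name)` when the line matches, `None` otherwise.
type Construct = fn(&str) -> Option<&'static str>;

fn heading_atx(line: &str) -> Option<&'static str> {
    if line.starts_with('#') {
        Some("heading (atx)")
    } else {
        None
    }
}

fn thematic_break(line: &str) -> Option<&'static str> {
    if line.starts_with("***") {
        Some("thematic break")
    } else {
        None
    }
}

fn heading_setext(line: &str) -> Option<&'static str> {
    // Placeholder: a real setext heading is recognized from its underline,
    // so detection also depends on the preceding line, which this ignores.
    if line.starts_with('=') {
        Some("heading (setext)")
    } else {
        None
    }
}

/// Try each construct in order; if all of them fail, fall back to content.
fn attempt_3(line: &str, constructs: [Construct; 3]) -> &'static str {
    for construct in constructs {
        if let Some(name) = construct(line) {
            return name;
        }
    }
    "content"
}

fn main() {
    let constructs: [Construct; 3] = [heading_atx, thematic_break, heading_setext];
    for line in ["# alpha", "***", "= bravo", "charlie"] {
        println!("{line:?} -> {}", attempt_3(line, constructs));
    }
}
```

Running the sketch prints which construct claims each sample line, with `charlie` falling through to content.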