Diffstat (limited to 'src/construct')
-rw-r--r--  src/construct/blank_line.rs            |  6
-rw-r--r--  src/construct/block_quote.rs           | 12
-rw-r--r--  src/construct/code_fenced.rs           | 60
-rw-r--r--  src/construct/code_indented.rs         | 36
-rw-r--r--  src/construct/definition.rs            | 59
-rw-r--r--  src/construct/heading_atx.rs           | 18
-rw-r--r--  src/construct/heading_setext.rs        | 12
-rw-r--r--  src/construct/html_flow.rs             | 26
-rw-r--r--  src/construct/html_text.rs             |  6
-rw-r--r--  src/construct/label_end.rs             | 84
-rw-r--r--  src/construct/list.rs                  | 60
-rw-r--r--  src/construct/partial_label.rs         | 12
-rw-r--r--  src/construct/partial_space_or_tab.rs  | 18
-rw-r--r--  src/construct/partial_title.rs         | 12
-rw-r--r--  src/construct/thematic_break.rs        | 12
15 files changed, 265 insertions(+), 168 deletions(-)
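
Every hunk below is the same mechanical rewrite: the tokenizer's `go` and `attempt_opt` helpers, plus the closure-taking forms of `attempt` and `check`, collapse into one three-argument shape that receives the ok and nok continuations as explicit `State` values. A minimal, self-contained model of the two helper equivalences — the enum shapes are inferred from the call sites in this diff, not copied from the crate's `tokenizer.rs`:

```rust
// Hypothetical standalone model; not the crate's real types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum StateName {
    BlankLineAfter, // one variant per state fn in the real crate
}

#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(dead_code)]
enum State {
    Ok,
    Nok,
    Fn(StateName),
}

struct Tokenizer;

impl Tokenizer {
    /// The surviving primitive: run `name`, then continue at `ok` or `nok`.
    fn attempt(&mut self, name: StateName, ok: State, nok: State) -> State {
        // The real tokenizer drives the named state machine and picks a
        // branch; this stub just picks `ok` so the model compiles and runs.
        let _ = (name, nok);
        ok
    }

    /// Old `go(name, next)`: the probe must succeed, else the construct fails.
    fn go(&mut self, name: StateName, next: StateName) -> State {
        self.attempt(name, State::Fn(next), State::Nok)
    }

    /// Old `attempt_opt(name, next)`: optional, both branches continue at `next`.
    fn attempt_opt(&mut self, name: StateName, next: StateName) -> State {
        self.attempt(name, State::Fn(next), State::Fn(next))
    }
}

fn main() {
    let mut t = Tokenizer;
    let (name, next) = (StateName::BlankLineAfter, StateName::BlankLineAfter);
    assert_eq!(t.go(name, next), t.attempt(name, State::Fn(next), State::Nok));
    assert_eq!(
        t.attempt_opt(name, next),
        t.attempt(name, State::Fn(next), State::Fn(next)),
    );
}
```

Precomputing both continuations is what lets the closures disappear: nothing in them depended on anything learned during the attempt (label_end.rs, annotated further down, is the one near-exception).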
diff --git a/src/construct/blank_line.rs b/src/construct/blank_line.rs
index b12c2c4..22dfdc0 100644
--- a/src/construct/blank_line.rs
+++ b/src/construct/blank_line.rs
@@ -47,7 +47,11 @@ use crate::tokenizer::{State, StateName, Tokenizer};
/// ```
pub fn start(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::BlankLineAfter)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::BlankLineAfter),
+ State::Fn(StateName::BlankLineAfter),
+ )
}
/// After zero or more spaces or tabs, before a line ending or EOF.
diff --git a/src/construct/block_quote.rs b/src/construct/block_quote.rs
index df58d62..cec3dce 100644
--- a/src/construct/block_quote.rs
+++ b/src/construct/block_quote.rs
@@ -55,7 +55,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.go(state_name, StateName::BlockQuoteBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::BlockQuoteBefore),
+ State::Nok,
+ )
} else {
State::Nok
}
@@ -94,7 +98,11 @@ pub fn cont_start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.go(state_name, StateName::BlockQuoteContBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::BlockQuoteContBefore),
+ State::Nok,
+ )
}
/// After whitespace, before `>`.
diff --git a/src/construct/code_fenced.rs b/src/construct/code_fenced.rs
index 46c5f9f..91fd8e4 100644
--- a/src/construct/code_fenced.rs
+++ b/src/construct/code_fenced.rs
@@ -128,7 +128,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.go(state_name, StateName::CodeFencedBeforeSequenceOpen)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeFencedBeforeSequenceOpen),
+ State::Nok,
+ )
} else {
State::Nok
}
@@ -184,7 +188,11 @@ pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
_ if tokenizer.tokenize_state.size >= CODE_FENCED_SEQUENCE_SIZE_MIN => {
tokenizer.exit(Token::CodeFencedFenceSequence);
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::CodeFencedInfoBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeFencedInfoBefore),
+ State::Fn(StateName::CodeFencedInfoBefore),
+ )
}
_ => {
tokenizer.tokenize_state.marker = 0;
@@ -241,7 +249,11 @@ pub fn info(tokenizer: &mut Tokenizer) -> State {
tokenizer.exit(Token::Data);
tokenizer.exit(Token::CodeFencedFenceInfo);
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::CodeFencedMetaBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeFencedMetaBefore),
+ State::Fn(StateName::CodeFencedMetaBefore),
+ )
}
Some(b'`') if tokenizer.tokenize_state.marker == b'`' => {
tokenizer.concrete = false;
@@ -323,13 +335,11 @@ pub fn meta(tokenizer: &mut Tokenizer) -> State {
/// | ~~~
/// ```
pub fn at_break(tokenizer: &mut Tokenizer) -> State {
- tokenizer.check(StateName::NonLazyContinuationStart, |ok| {
- State::Fn(if ok {
- StateName::CodeFencedAtNonLazyBreak
- } else {
- StateName::CodeFencedAfter
- })
- })
+ tokenizer.check(
+ StateName::NonLazyContinuationStart,
+ State::Fn(StateName::CodeFencedAtNonLazyBreak),
+ State::Fn(StateName::CodeFencedAfter),
+ )
}
/// At an eol/eof in code, before a non-lazy closing fence or content.
@@ -342,13 +352,11 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
/// | ~~~
/// ```
pub fn at_non_lazy_break(tokenizer: &mut Tokenizer) -> State {
- tokenizer.attempt(StateName::CodeFencedCloseBefore, |ok| {
- State::Fn(if ok {
- StateName::CodeFencedAfter
- } else {
- StateName::CodeFencedContentBefore
- })
- })
+ tokenizer.attempt(
+ StateName::CodeFencedCloseBefore,
+ State::Fn(StateName::CodeFencedAfter),
+ State::Fn(StateName::CodeFencedContentBefore),
+ )
}
/// Before a closing fence, at the line ending.
@@ -390,7 +398,11 @@ pub fn close_start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.go(state_name, StateName::CodeFencedBeforeSequenceClose)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeFencedBeforeSequenceClose),
+ State::Nok,
+ )
}
/// In a closing fence, after optional whitespace, before sequence.
@@ -432,7 +444,11 @@ pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size_other = 0;
tokenizer.exit(Token::CodeFencedFenceSequence);
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::CodeFencedAfterSequenceClose)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeFencedAfterSequenceClose),
+ State::Fn(StateName::CodeFencedAfterSequenceClose),
+ )
}
_ => {
tokenizer.tokenize_state.size_other = 0;
@@ -483,7 +499,11 @@ pub fn content_before(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn content_start(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab_min_max(tokenizer, 0, tokenizer.tokenize_state.prefix);
- tokenizer.go(state_name, StateName::CodeFencedBeforeContentChunk)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeFencedBeforeContentChunk),
+ State::Nok,
+ )
}
/// Before code content, after a prefix.
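
code_fenced.rs shows the two primitives side by side: `at_break` uses `check`, `at_non_lazy_break` uses `attempt`. The difference, as these call sites use it, is that `check` rewinds regardless of the outcome, so only its ok/nok verdict survives, while a successful `attempt` keeps what the probe consumed. A standalone sketch of that assumed contract, with a hypothetical blank-line probe standing in for checks such as `NonLazyContinuationStart`:

```rust
// Assumed `check` contract, inferred from how results are used in this diff:
// probe ahead, then rewind no matter what, keeping only the verdict.
fn check(line: &str, probe: fn(&str) -> bool) -> bool {
    // A real tokenizer would snapshot position/events here, run the probe's
    // states, then restore the snapshot before returning.
    probe(line)
}

fn main() {
    // Hypothetical probe: is the next line blank?
    let blank = |line: &str| line.chars().all(|c| c == ' ' || c == '\t');
    // Pick a continuation state without consuming the probed line.
    let next = if check("   \t", blank) {
        "ContinuationAfter" // e.g. the flow construct ends here
    } else {
        "ContinuationStart" // more content follows
    };
    println!("continue at {next}");
}
```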
diff --git a/src/construct/code_indented.rs b/src/construct/code_indented.rs
index 516b493..0906e5f 100644
--- a/src/construct/code_indented.rs
+++ b/src/construct/code_indented.rs
@@ -65,7 +65,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
if !tokenizer.interrupt && tokenizer.parse_state.constructs.code_indented {
tokenizer.enter(Token::CodeIndented);
let state_name = space_or_tab_min_max(tokenizer, TAB_SIZE, TAB_SIZE);
- tokenizer.go(state_name, StateName::CodeIndentedAtBreak)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeIndentedAtBreak),
+ State::Nok,
+ )
} else {
State::Nok
}
@@ -80,13 +84,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
pub fn at_break(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => after(tokenizer),
- Some(b'\n') => tokenizer.attempt(StateName::CodeIndentedFurtherStart, |ok| {
- State::Fn(if ok {
- StateName::CodeIndentedAtBreak
- } else {
- StateName::CodeIndentedAfter
- })
- }),
+ Some(b'\n') => tokenizer.attempt(
+ StateName::CodeIndentedFurtherStart,
+ State::Fn(StateName::CodeIndentedAtBreak),
+ State::Fn(StateName::CodeIndentedAfter),
+ ),
_ => {
tokenizer.enter(Token::CodeFlowChunk);
inside(tokenizer)
@@ -143,13 +145,11 @@ pub fn further_start(tokenizer: &mut Tokenizer) -> State {
}
_ if !tokenizer.lazy => {
let state_name = space_or_tab_min_max(tokenizer, TAB_SIZE, TAB_SIZE);
- tokenizer.attempt(state_name, |ok| {
- State::Fn(if ok {
- StateName::CodeIndentedFurtherEnd
- } else {
- StateName::CodeIndentedFurtherBegin
- })
- })
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeIndentedFurtherEnd),
+ State::Fn(StateName::CodeIndentedFurtherBegin),
+ )
}
_ => State::Nok,
}
@@ -175,7 +175,11 @@ pub fn further_end(_tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn further_begin(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::CodeIndentedFurtherAfter)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::CodeIndentedFurtherAfter),
+ State::Fn(StateName::CodeIndentedFurtherAfter),
+ )
}
/// After whitespace, not indented enough.
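
code_indented.rs makes the control flow explicit: `at_break` attempts `CodeIndentedFurtherStart`, whose ok branch is `CodeIndentedAtBreak` again, one iteration per line. This avoids unbounded recursion only because the returned `State::Fn(...)` values are trampolined by a driver loop; a minimal standalone sketch of that idea (assumed shape, not the crate's actual dispatcher):

```rust
// Trampoline: run state functions until one settles on Ok or Nok. The real
// crate dispatches on `StateName` through a table; fn pointers model that.
#[allow(dead_code)]
enum State {
    Ok,
    Nok,
    Fn(fn(&mut Ctx) -> State),
}

struct Ctx {
    lines_left: u32, // stand-in for "more further-indented lines to eat"
}

fn at_break(ctx: &mut Ctx) -> State {
    if ctx.lines_left == 0 {
        State::Ok // no more lines: the indented code block is done
    } else {
        State::Fn(further_start)
    }
}

fn further_start(ctx: &mut Ctx) -> State {
    ctx.lines_left -= 1;
    State::Fn(at_break) // loop back without growing the call stack
}

fn main() {
    let mut ctx = Ctx { lines_left: 3 };
    let mut state = State::Fn(at_break);
    while let State::Fn(f) = state {
        state = f(&mut ctx);
    }
    assert!(matches!(state, State::Ok));
}
```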
diff --git a/src/construct/definition.rs b/src/construct/definition.rs
index fbad99d..350992b 100644
--- a/src/construct/definition.rs
+++ b/src/construct/definition.rs
@@ -120,7 +120,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::Definition);
// Note: arbitrary whitespace allowed even if code (indented) is on.
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::DefinitionBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::DefinitionBefore),
+ State::Fn(StateName::DefinitionBefore),
+ )
} else {
State::Nok
}
@@ -138,7 +142,11 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_1 = Token::DefinitionLabel;
tokenizer.tokenize_state.token_2 = Token::DefinitionLabelMarker;
tokenizer.tokenize_state.token_3 = Token::DefinitionLabelString;
- tokenizer.go(StateName::LabelStart, StateName::DefinitionLabelAfter)
+ tokenizer.attempt(
+ StateName::LabelStart,
+ State::Fn(StateName::DefinitionLabelAfter),
+ State::Nok,
+ )
}
_ => State::Nok,
}
@@ -169,7 +177,11 @@ pub fn label_after(tokenizer: &mut Tokenizer) -> State {
/// To do.
pub fn marker_after(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab_eol(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::DefinitionDestinationBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::DefinitionDestinationBefore),
+ State::Fn(StateName::DefinitionDestinationBefore),
+ )
}
/// Before a destination.
@@ -185,13 +197,11 @@ pub fn destination_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_4 = Token::DefinitionDestinationRaw;
tokenizer.tokenize_state.token_5 = Token::DefinitionDestinationString;
tokenizer.tokenize_state.size_other = usize::MAX;
- tokenizer.attempt(StateName::DestinationStart, |ok| {
- State::Fn(if ok {
- StateName::DefinitionDestinationAfter
- } else {
- StateName::DefinitionDestinationMissing
- })
- })
+ tokenizer.attempt(
+ StateName::DestinationStart,
+ State::Fn(StateName::DefinitionDestinationAfter),
+ State::Fn(StateName::DefinitionDestinationMissing),
+ )
}
/// After a destination.
@@ -207,7 +217,11 @@ pub fn destination_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_4 = Token::Data;
tokenizer.tokenize_state.token_5 = Token::Data;
tokenizer.tokenize_state.size_other = 0;
- tokenizer.attempt_opt(StateName::DefinitionTitleBefore, StateName::DefinitionAfter)
+ tokenizer.attempt(
+ StateName::DefinitionTitleBefore,
+ State::Fn(StateName::DefinitionAfter),
+ State::Fn(StateName::DefinitionAfter),
+ )
}
/// Without destination.
@@ -231,7 +245,11 @@ pub fn destination_missing(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn after(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::DefinitionAfterWhitespace)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::DefinitionAfterWhitespace),
+ State::Fn(StateName::DefinitionAfterWhitespace),
+ )
}
/// After a definition, after optional whitespace.
@@ -264,7 +282,11 @@ pub fn after_whitespace(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn title_before(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab_eol(tokenizer);
- tokenizer.go(state_name, StateName::DefinitionTitleBeforeMarker)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::DefinitionTitleBeforeMarker),
+ State::Nok,
+ )
}
/// Before a title, after a line ending.
@@ -278,7 +300,11 @@ pub fn title_before_marker(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_1 = Token::DefinitionTitle;
tokenizer.tokenize_state.token_2 = Token::DefinitionTitleMarker;
tokenizer.tokenize_state.token_3 = Token::DefinitionTitleString;
- tokenizer.go(StateName::TitleStart, StateName::DefinitionTitleAfter)
+ tokenizer.attempt(
+ StateName::TitleStart,
+ State::Fn(StateName::DefinitionTitleAfter),
+ State::Nok,
+ )
}
/// After a title.
@@ -292,9 +318,10 @@ pub fn title_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_2 = Token::Data;
tokenizer.tokenize_state.token_3 = Token::Data;
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(
+ tokenizer.attempt(
state_name,
- StateName::DefinitionTitleAfterOptionalWhitespace,
+ State::Fn(StateName::DefinitionTitleAfterOptionalWhitespace),
+ State::Fn(StateName::DefinitionTitleAfterOptionalWhitespace),
)
}
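
One detail worth noticing in definition.rs: the destination attempt's nok branch is not a bare `State::Nok` but the named state `DefinitionDestinationMissing`, so the construct can tidy the shared scratch fields (the `token_*` slots set just before the attempt) before the failure propagates. A sketch of that recover-then-fail pattern, with illustrative names rather than the crate's:

```rust
// Hypothetical recover-then-fail branch: reset scratch state, then report
// failure. Mirrors the role of DefinitionDestinationMissing, not its code.
struct Scratch {
    token_kind: &'static str,
}

fn destination_missing(scratch: &mut Scratch) -> bool {
    scratch.token_kind = "Data"; // undo the setup done before the attempt
    false // and only then surface the failure
}

fn main() {
    let mut scratch = Scratch {
        token_kind: "DefinitionDestination", // set up before the attempt
    };
    let parsed = false; // pretend the destination probe failed
    let ok = parsed || destination_missing(&mut scratch);
    assert!(!ok);
    assert_eq!(scratch.token_kind, "Data"); // scratch is clean again
}
```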
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index 45c4758..e5bc3bd 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -77,7 +77,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.go(state_name, StateName::HeadingAtxBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::HeadingAtxBefore),
+ State::Nok,
+ )
} else {
State::Nok
}
@@ -120,7 +124,11 @@ pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size = 0;
tokenizer.exit(Token::HeadingAtxSequence);
let state_name = space_or_tab(tokenizer);
- tokenizer.go(state_name, StateName::HeadingAtxAtBreak)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::HeadingAtxAtBreak),
+ State::Nok,
+ )
}
_ => {
tokenizer.tokenize_state.size = 0;
@@ -146,7 +154,11 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
}
Some(b'\t' | b' ') => {
let state_name = space_or_tab(tokenizer);
- tokenizer.go(state_name, StateName::HeadingAtxAtBreak)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::HeadingAtxAtBreak),
+ State::Nok,
+ )
}
Some(b'#') => {
tokenizer.enter(Token::HeadingAtxSequence);
diff --git a/src/construct/heading_setext.rs b/src/construct/heading_setext.rs
index 50feba4..a8c707a 100644
--- a/src/construct/heading_setext.rs
+++ b/src/construct/heading_setext.rs
@@ -93,7 +93,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
},
);
- tokenizer.go(state_name, StateName::HeadingSetextBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::HeadingSetextBefore),
+ State::Nok,
+ )
} else {
State::Nok
}
@@ -134,7 +138,11 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.marker = 0;
tokenizer.exit(Token::HeadingSetextUnderline);
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::HeadingSetextAfter)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::HeadingSetextAfter),
+ State::Fn(StateName::HeadingSetextAfter),
+ )
}
}
}
diff --git a/src/construct/html_flow.rs b/src/construct/html_flow.rs
index 779146c..128fd2e 100644
--- a/src/construct/html_flow.rs
+++ b/src/construct/html_flow.rs
@@ -147,7 +147,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
},
);
- tokenizer.go(state_name, StateName::HtmlFlowBefore)
+ tokenizer.attempt(state_name, State::Fn(StateName::HtmlFlowBefore), State::Nok)
} else {
State::Nok
}
@@ -632,13 +632,11 @@ pub fn continuation(tokenizer: &mut Tokenizer) -> State {
|| tokenizer.tokenize_state.marker == COMPLETE =>
{
tokenizer.exit(Token::HtmlFlowData);
- tokenizer.check(StateName::HtmlFlowBlankLineBefore, |ok| {
- State::Fn(if ok {
- StateName::HtmlFlowContinuationAfter
- } else {
- StateName::HtmlFlowContinuationStart
- })
- })
+ tokenizer.check(
+ StateName::HtmlFlowBlankLineBefore,
+ State::Fn(StateName::HtmlFlowContinuationAfter),
+ State::Fn(StateName::HtmlFlowContinuationStart),
+ )
}
// Note: important that this is after the basic/complete case.
None | Some(b'\n') => {
@@ -680,13 +678,11 @@ pub fn continuation(tokenizer: &mut Tokenizer) -> State {
/// | asd
/// ```
pub fn continuation_start(tokenizer: &mut Tokenizer) -> State {
- tokenizer.check(StateName::NonLazyContinuationStart, |ok| {
- State::Fn(if ok {
- StateName::HtmlFlowContinuationStartNonLazy
- } else {
- StateName::HtmlFlowContinuationAfter
- })
- })
+ tokenizer.check(
+ StateName::NonLazyContinuationStart,
+ State::Fn(StateName::HtmlFlowContinuationStartNonLazy),
+ State::Fn(StateName::HtmlFlowContinuationAfter),
+ )
}
/// In continuation, at an eol, before non-lazy content.
diff --git a/src/construct/html_text.rs b/src/construct/html_text.rs
index 1c1f9e6..c1dfaca 100644
--- a/src/construct/html_text.rs
+++ b/src/construct/html_text.rs
@@ -659,7 +659,11 @@ pub fn line_ending_before(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn line_ending_after(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::HtmlTextLineEndingAfterPrefix)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::HtmlTextLineEndingAfterPrefix),
+ State::Fn(StateName::HtmlTextLineEndingAfterPrefix),
+ )
}
/// After a line ending, after indent.
diff --git a/src/construct/label_end.rs b/src/construct/label_end.rs
index ae9fe77..47ded36 100644
--- a/src/construct/label_end.rs
+++ b/src/construct/label_end.rs
@@ -237,25 +237,28 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
// Resource (`[asd](fgh)`)?
- Some(b'(') => tokenizer.attempt(StateName::LabelEndResourceStart, move |is_ok| {
- State::Fn(if is_ok || defined {
+ Some(b'(') => tokenizer.attempt(
+ StateName::LabelEndResourceStart,
+ State::Fn(StateName::LabelEndOk),
+ State::Fn(if defined {
StateName::LabelEndOk
} else {
StateName::LabelEndNok
- })
- }),
+ }),
+ ),
// Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?
- Some(b'[') => tokenizer.attempt(StateName::LabelEndReferenceFull, move |is_ok| {
- State::Fn(if is_ok {
- StateName::LabelEndOk
- } else if defined {
+ Some(b'[') => tokenizer.attempt(
+ StateName::LabelEndReferenceFull,
+ State::Fn(StateName::LabelEndOk),
+ State::Fn(if defined {
StateName::LabelEndReferenceNotFull
} else {
StateName::LabelEndNok
- })
- }),
+ }),
+ ),
// Shortcut (`[asd]`) reference?
_ => {
+ // To do: use state names?
let func = if defined { ok } else { nok };
func(tokenizer)
}
@@ -273,13 +276,11 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
pub fn reference_not_full(tokenizer: &mut Tokenizer) -> State {
- tokenizer.attempt(StateName::LabelEndReferenceCollapsed, |is_ok| {
- State::Fn(if is_ok {
- StateName::LabelEndOk
- } else {
- StateName::LabelEndNok
- })
- })
+ tokenizer.attempt(
+ StateName::LabelEndReferenceCollapsed,
+ State::Fn(StateName::LabelEndOk),
+ State::Fn(StateName::LabelEndNok),
+ )
}
/// Done, we found something.
@@ -375,7 +376,11 @@ pub fn resource_start(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn resource_before(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab_eol(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::LabelEndResourceOpen)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::LabelEndResourceOpen),
+ State::Fn(StateName::LabelEndResourceOpen),
+ )
}
/// At the start of a resource, after optional whitespace.
@@ -395,13 +400,11 @@ pub fn resource_open(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_5 = Token::ResourceDestinationString;
tokenizer.tokenize_state.size_other = RESOURCE_DESTINATION_BALANCE_MAX;
- tokenizer.attempt(StateName::DestinationStart, |ok| {
- State::Fn(if ok {
- StateName::LabelEndResourceDestinationAfter
- } else {
- StateName::LabelEndResourceDestinationMissing
- })
- })
+ tokenizer.attempt(
+ StateName::DestinationStart,
+ State::Fn(StateName::LabelEndResourceDestinationAfter),
+ State::Fn(StateName::LabelEndResourceDestinationMissing),
+ )
}
}
@@ -419,14 +422,11 @@ pub fn resource_destination_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_5 = Token::Data;
tokenizer.tokenize_state.size_other = 0;
let state_name = space_or_tab_eol(tokenizer);
-
- tokenizer.attempt(state_name, |ok| {
- State::Fn(if ok {
- StateName::LabelEndResourceBetween
- } else {
- StateName::LabelEndResourceEnd
- })
- })
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::LabelEndResourceBetween),
+ State::Fn(StateName::LabelEndResourceEnd),
+ )
}
/// Without destination.
@@ -452,7 +452,11 @@ pub fn resource_between(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_1 = Token::ResourceTitle;
tokenizer.tokenize_state.token_2 = Token::ResourceTitleMarker;
tokenizer.tokenize_state.token_3 = Token::ResourceTitleString;
- tokenizer.go(StateName::TitleStart, StateName::LabelEndResourceTitleAfter)
+ tokenizer.attempt(
+ StateName::TitleStart,
+ State::Fn(StateName::LabelEndResourceTitleAfter),
+ State::Nok,
+ )
}
_ => resource_end(tokenizer),
}
@@ -469,7 +473,11 @@ pub fn resource_title_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_2 = Token::Data;
tokenizer.tokenize_state.token_3 = Token::Data;
let state_name = space_or_tab_eol(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::LabelEndResourceEnd)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::LabelEndResourceEnd),
+ State::Fn(StateName::LabelEndResourceEnd),
+ )
}
/// In a resource, at the `)`.
@@ -503,7 +511,11 @@ pub fn reference_full(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_1 = Token::Reference;
tokenizer.tokenize_state.token_2 = Token::ReferenceMarker;
tokenizer.tokenize_state.token_3 = Token::ReferenceString;
- tokenizer.go(StateName::LabelStart, StateName::LabelEndReferenceFullAfter)
+ tokenizer.attempt(
+ StateName::LabelStart,
+ State::Fn(StateName::LabelEndReferenceFullAfter),
+ State::Nok,
+ )
}
_ => unreachable!("expected `[`"),
}
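
The label_end.rs hunks are the one place the old closures did more than pick between two fixed names: for `(` the closure computed `is_ok || defined`, and for `[` it fell through to `defined` on failure. Because `defined` is fixed before the attempt runs, both branches can be evaluated up front and passed as plain `State` values, which is exactly what the new code does. A standalone sketch of why the two forms agree:

```rust
// Both forms choose the same continuation for every (is_ok, defined) pair,
// because `defined` cannot change while the attempt runs. Illustrative names.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Next {
    Ok,
    Nok,
}

// Old shape: decide after the attempt, inside a closure.
fn old(is_ok: bool, defined: bool) -> Next {
    if is_ok || defined { Next::Ok } else { Next::Nok }
}

// New shape: precompute both continuations, let the attempt pick one.
fn new(is_ok: bool, defined: bool) -> Next {
    let ok = Next::Ok;
    let nok = if defined { Next::Ok } else { Next::Nok };
    if is_ok { ok } else { nok }
}

fn main() {
    for &is_ok in &[false, true] {
        for &defined in &[false, true] {
            assert_eq!(old(is_ok, defined), new(is_ok, defined));
        }
    }
    println!("equivalent on all four cases");
}
```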
diff --git a/src/construct/list.rs b/src/construct/list.rs
index 6ecfb04..9e4b105 100644
--- a/src/construct/list.rs
+++ b/src/construct/list.rs
@@ -71,7 +71,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.go(state_name, StateName::ListBefore)
+ tokenizer.attempt(state_name, State::Fn(StateName::ListBefore), State::Nok)
} else {
State::Nok
}
@@ -86,13 +86,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
pub fn before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
// Unordered.
- Some(b'*' | b'-') => tokenizer.check(StateName::ThematicBreakStart, |ok| {
- State::Fn(if ok {
- StateName::ListNok
- } else {
- StateName::ListBeforeUnordered
- })
- }),
+ Some(b'*' | b'-') => tokenizer.check(
+ StateName::ThematicBreakStart,
+ State::Fn(StateName::ListNok),
+ State::Fn(StateName::ListBeforeUnordered),
+ ),
Some(b'+') => before_unordered(tokenizer),
// Ordered.
Some(b'0'..=b'9') if !tokenizer.interrupt => before_ordered(tokenizer),
@@ -175,13 +173,11 @@ pub fn marker(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn marker_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size = 1;
- tokenizer.check(StateName::BlankLineStart, |ok| {
- State::Fn(if ok {
- StateName::ListAfter
- } else {
- StateName::ListMarkerAfterFilled
- })
- })
+ tokenizer.check(
+ StateName::BlankLineStart,
+ State::Fn(StateName::ListAfter),
+ State::Fn(StateName::ListMarkerAfterFilled),
+ )
}
/// After a list item marker, not followed by a blank line.
@@ -194,13 +190,11 @@ pub fn marker_after_filled(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size = 0;
// Attempt to parse up to the largest allowed indent, `nok` if there is more whitespace.
- tokenizer.attempt(StateName::ListWhitespace, |ok| {
- State::Fn(if ok {
- StateName::ListAfter
- } else {
- StateName::ListPrefixOther
- })
- })
+ tokenizer.attempt(
+ StateName::ListWhitespace,
+ State::Fn(StateName::ListAfter),
+ State::Fn(StateName::ListPrefixOther),
+ )
}
/// In whitespace after a marker.
@@ -211,7 +205,11 @@ pub fn marker_after_filled(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn whitespace(tokenizer: &mut Tokenizer) -> State {
let state_name = space_or_tab_min_max(tokenizer, 1, TAB_SIZE);
- tokenizer.go(state_name, StateName::ListWhitespaceAfter)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::ListWhitespaceAfter),
+ State::Nok,
+ )
}
/// After acceptable whitespace.
@@ -295,13 +293,11 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
pub fn cont_start(tokenizer: &mut Tokenizer) -> State {
- tokenizer.check(StateName::BlankLineStart, |ok| {
- State::Fn(if ok {
- StateName::ListContBlank
- } else {
- StateName::ListContFilled
- })
- })
+ tokenizer.check(
+ StateName::BlankLineStart,
+ State::Fn(StateName::ListContBlank),
+ State::Fn(StateName::ListContFilled),
+ )
}
/// Start of blank list item continuation.
@@ -321,7 +317,7 @@ pub fn cont_blank(tokenizer: &mut Tokenizer) -> State {
} else {
let state_name = space_or_tab_min_max(tokenizer, 0, size);
// Consume, optionally, at most `size`.
- tokenizer.go(state_name, StateName::ListOk)
+ tokenizer.attempt(state_name, State::Fn(StateName::ListOk), State::Nok)
}
}
@@ -340,7 +336,7 @@ pub fn cont_filled(tokenizer: &mut Tokenizer) -> State {
// Consume exactly `size`.
let state_name = space_or_tab_min_max(tokenizer, size, size);
- tokenizer.go(state_name, StateName::ListOk)
+ tokenizer.attempt(state_name, State::Fn(StateName::ListOk), State::Nok)
}
/// A state fn to yield [`State::Ok`].
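
The closing list.rs hunks encode the indent rules in the `space_or_tab_min_max` bounds: blank continuations consume at most the item's `size` (`0, size`), filled continuations must consume exactly `size` (`size, size`), and failure of either attempt is a hard `State::Nok`. A standalone model of that min/max matcher (behavioral sketch only; the real partial works on the tokenizer's byte stream and handles tab stops):

```rust
// Model of space_or_tab_min_max: accept between `min` and `max` leading
// spaces/tabs. Some(n) plays State::Ok after eating n bytes; None is Nok.
fn space_or_tab_min_max(line: &str, min: usize, max: usize) -> Option<usize> {
    let taken = line
        .bytes()
        .take(max)
        .take_while(|b| *b == b' ' || *b == b'\t')
        .count();
    if taken >= min {
        Some(taken)
    } else {
        None
    }
}

fn main() {
    let size = 2; // indent owed to the current list item
    // Blank line: consume, optionally, at most `size`.
    assert_eq!(space_or_tab_min_max("", 0, size), Some(0));
    assert_eq!(space_or_tab_min_max("    ", 0, size), Some(size));
    // Filled line: consume exactly `size`, or the continuation fails.
    assert_eq!(space_or_tab_min_max("  code", size, size), Some(size));
    assert_eq!(space_or_tab_min_max(" code", size, size), None);
}
```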
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index 0c8366e..070bdc0 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -109,13 +109,11 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
connect: tokenizer.tokenize_state.connect,
},
);
- tokenizer.attempt(state_name, |ok| {
- State::Fn(if ok {
- StateName::LabelEolAfter
- } else {
- StateName::LabelAtBlankLine
- })
- })
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::LabelEolAfter),
+ State::Fn(StateName::LabelAtBlankLine),
+ )
}
Some(b']') => {
tokenizer.exit(tokenizer.tokenize_state.token_3.clone());
diff --git a/src/construct/partial_space_or_tab.rs b/src/construct/partial_space_or_tab.rs
index b0b35a6..0b63b0e 100644
--- a/src/construct/partial_space_or_tab.rs
+++ b/src/construct/partial_space_or_tab.rs
@@ -180,13 +180,11 @@ pub fn eol_start(tokenizer: &mut Tokenizer) -> State {
},
);
- tokenizer.attempt(state_name, move |ok| {
- State::Fn(if ok {
- StateName::SpaceOrTabEolAfterFirst
- } else {
- StateName::SpaceOrTabEolAtEol
- })
- })
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::SpaceOrTabEolAfterFirst),
+ State::Fn(StateName::SpaceOrTabEolAtEol),
+ )
}
pub fn eol_after_first(tokenizer: &mut Tokenizer) -> State {
@@ -269,7 +267,11 @@ pub fn eol_after_eol(tokenizer: &mut Tokenizer) -> State {
connect: tokenizer.tokenize_state.space_or_tab_eol_connect,
},
);
- tokenizer.attempt_opt(state_name, StateName::SpaceOrTabEolAfterMore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::SpaceOrTabEolAfterMore),
+ State::Fn(StateName::SpaceOrTabEolAfterMore),
+ )
}
/// `space_or_tab_eol`: after more (optional) `space_or_tab`.
diff --git a/src/construct/partial_title.rs b/src/construct/partial_title.rs
index 8b72608..1d3e9b2 100644
--- a/src/construct/partial_title.rs
+++ b/src/construct/partial_title.rs
@@ -106,13 +106,11 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
},
);
- tokenizer.attempt(state_name, |ok| {
- State::Fn(if ok {
- StateName::TitleAfterEol
- } else {
- StateName::TitleAtBlankLine
- })
- })
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::TitleAfterEol),
+ State::Fn(StateName::TitleAtBlankLine),
+ )
}
Some(b'"' | b'\'' | b')')
if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker =>
diff --git a/src/construct/thematic_break.rs b/src/construct/thematic_break.rs
index 4ed25b6..30fabe4 100644
--- a/src/construct/thematic_break.rs
+++ b/src/construct/thematic_break.rs
@@ -72,7 +72,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
},
);
- tokenizer.go(state_name, StateName::ThematicBreakBefore)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::ThematicBreakBefore),
+ State::Nok,
+ )
} else {
State::Nok
}
@@ -142,7 +146,11 @@ pub fn sequence(tokenizer: &mut Tokenizer) -> State {
_ => {
tokenizer.exit(Token::ThematicBreakSequence);
let state_name = space_or_tab(tokenizer);
- tokenizer.attempt_opt(state_name, StateName::ThematicBreakAtBreak)
+ tokenizer.attempt(
+ state_name,
+ State::Fn(StateName::ThematicBreakAtBreak),
+ State::Fn(StateName::ThematicBreakAtBreak),
+ )
}
}
}