author    Titus Wormer <tituswormer@gmail.com>  2022-07-25 16:27:45 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-07-25 16:27:45 +0200
commit    79fe341efc4d5e7467ec08cb7d0688b17a9efb05 (patch)
tree      041c6fdf5821d27b8510005f847a3bf4488a3bf6 /src
parent    ca56f2742d8719358d2046fbdee4f1087add0568 (diff)
Remove no longer needed field in `State::Ok`
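In short: the `usize` payload on `State::Ok` (the "back" offset) was always `0` by the time a state finished, so it carries no information and the variant becomes a plain unit variant. A minimal sketch of the change to `State` in `src/tokenizer.rs` (simplified; the full enum and the matching `attempt_impl` change are in the diff below):

    // Before: `Ok` carried how many codes to step back, but callers always passed 0.
    pub enum State {
        Fn(Box<StateFn>),
        Ok(usize),
        Nok,
    }

    // After: `Ok` is a unit variant; `attempt_impl` now stops at `tokenizer.index` directly.
    pub enum State {
        Fn(Box<StateFn>),
        Ok,
        Nok,
    }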
Diffstat (limited to 'src')
-rw-r--r--  src/construct/attention.rs                        2
-rw-r--r--  src/construct/autolink.rs                         2
-rw-r--r--  src/construct/blank_line.rs                       2
-rw-r--r--  src/construct/block_quote.rs                      4
-rw-r--r--  src/construct/character_escape.rs                 2
-rw-r--r--  src/construct/character_reference.rs              2
-rw-r--r--  src/construct/code_fenced.rs                      4
-rw-r--r--  src/construct/code_indented.rs                    4
-rw-r--r--  src/construct/code_text.rs                        2
-rw-r--r--  src/construct/definition.rs                       4
-rw-r--r--  src/construct/hard_break_escape.rs                2
-rw-r--r--  src/construct/hard_break_trailing.rs              2
-rw-r--r--  src/construct/heading_atx.rs                      2
-rw-r--r--  src/construct/heading_setext.rs                   2
-rw-r--r--  src/construct/html_flow.rs                        2
-rw-r--r--  src/construct/html_text.rs                        2
-rw-r--r--  src/construct/label_end.rs                        8
-rw-r--r--  src/construct/label_start_image.rs                2
-rw-r--r--  src/construct/label_start_link.rs                 2
-rw-r--r--  src/construct/list.rs                             6
-rw-r--r--  src/construct/paragraph.rs                        2
-rw-r--r--  src/construct/partial_data.rs                     4
-rw-r--r--  src/construct/partial_destination.rs              6
-rw-r--r--  src/construct/partial_label.rs                    2
-rw-r--r--  src/construct/partial_non_lazy_continuation.rs    2
-rw-r--r--  src/construct/partial_space_or_tab.rs             8
-rw-r--r--  src/construct/partial_title.rs                    2
-rw-r--r--  src/construct/partial_whitespace.rs               2
-rw-r--r--  src/construct/thematic_break.rs                   2
-rw-r--r--  src/content/document.rs                           6
-rw-r--r--  src/content/flow.rs                               8
-rw-r--r--  src/content/string.rs                             2
-rw-r--r--  src/content/text.rs                               2
-rw-r--r--  src/tokenizer.rs                                  18
34 files changed, 61 insertions, 63 deletions
diff --git a/src/construct/attention.rs b/src/construct/attention.rs
index eb93810..a53a328 100644
--- a/src/construct/attention.rs
+++ b/src/construct/attention.rs
@@ -193,7 +193,7 @@ fn inside(tokenizer: &mut Tokenizer, code: Code, marker: MarkerKind) -> State {
_ => {
tokenizer.exit(Token::AttentionSequence);
tokenizer.register_resolver("attention".to_string(), Box::new(resolve_attention));
- State::Ok(0)
+ State::Ok
}
}
}
diff --git a/src/construct/autolink.rs b/src/construct/autolink.rs
index 606fc9b..0c5a328 100644
--- a/src/construct/autolink.rs
+++ b/src/construct/autolink.rs
@@ -306,7 +306,7 @@ fn end(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.consume(code);
tokenizer.exit(Token::AutolinkMarker);
tokenizer.exit(Token::Autolink);
- State::Ok(0)
+ State::Ok
}
_ => unreachable!("expected `>`"),
}
diff --git a/src/construct/blank_line.rs b/src/construct/blank_line.rs
index dc36784..48cf2e6 100644
--- a/src/construct/blank_line.rs
+++ b/src/construct/blank_line.rs
@@ -59,7 +59,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> State {
/// ```
fn after(_tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
- Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => State::Ok(0),
+ Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => State::Ok,
_ => State::Nok,
}
}
diff --git a/src/construct/block_quote.rs b/src/construct/block_quote.rs
index 9925a5a..b99665f 100644
--- a/src/construct/block_quote.rs
+++ b/src/construct/block_quote.rs
@@ -124,11 +124,11 @@ fn cont_after(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.consume(code);
tokenizer.exit(Token::SpaceOrTab);
tokenizer.exit(Token::BlockQuotePrefix);
- State::Ok(0)
+ State::Ok
}
_ => {
tokenizer.exit(Token::BlockQuotePrefix);
- State::Ok(0)
+ State::Ok
}
}
}
diff --git a/src/construct/character_escape.rs b/src/construct/character_escape.rs
index 8403765..f317854 100644
--- a/src/construct/character_escape.rs
+++ b/src/construct/character_escape.rs
@@ -68,7 +68,7 @@ fn inside(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.consume(code);
tokenizer.exit(Token::CharacterEscapeValue);
tokenizer.exit(Token::CharacterEscape);
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
diff --git a/src/construct/character_reference.rs b/src/construct/character_reference.rs
index b2146e7..6e44502 100644
--- a/src/construct/character_reference.rs
+++ b/src/construct/character_reference.rs
@@ -227,7 +227,7 @@ fn value(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> State {
tokenizer.consume(code);
tokenizer.exit(Token::CharacterReferenceMarkerSemi);
tokenizer.exit(Token::CharacterReference);
- State::Ok(0)
+ State::Ok
}
}
Code::Char(char) => {
diff --git a/src/construct/code_fenced.rs b/src/construct/code_fenced.rs
index a814142..10968c2 100644
--- a/src/construct/code_fenced.rs
+++ b/src/construct/code_fenced.rs
@@ -506,7 +506,7 @@ fn close_sequence_after(tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
tokenizer.exit(Token::CodeFencedFence);
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
@@ -595,5 +595,5 @@ fn after(tokenizer: &mut Tokenizer, _code: Code) -> State {
tokenizer.interrupt = false;
// No longer concrete.
tokenizer.concrete = false;
- State::Ok(0)
+ State::Ok
}
diff --git a/src/construct/code_indented.rs b/src/construct/code_indented.rs
index 6c528ff..f223633 100644
--- a/src/construct/code_indented.rs
+++ b/src/construct/code_indented.rs
@@ -119,7 +119,7 @@ fn after(tokenizer: &mut Tokenizer, _code: Code) -> State {
tokenizer.exit(Token::CodeIndented);
// Feel free to interrupt.
tokenizer.interrupt = false;
- State::Ok(0)
+ State::Ok
}
/// Right at a line ending, trying to parse another indent.
@@ -155,7 +155,7 @@ fn further_start(tokenizer: &mut Tokenizer, code: Code) -> State {
/// ^
/// ```
fn further_end(_tokenizer: &mut Tokenizer, _code: Code) -> State {
- State::Ok(0)
+ State::Ok
}
/// At the beginning of a line that is not indented enough.
diff --git a/src/construct/code_text.rs b/src/construct/code_text.rs
index 451ef45..978ad01 100644
--- a/src/construct/code_text.rs
+++ b/src/construct/code_text.rs
@@ -190,7 +190,7 @@ fn sequence_close(tokenizer: &mut Tokenizer, code: Code, size_open: usize, size:
_ if size_open == size => {
tokenizer.exit(Token::CodeTextSequence);
tokenizer.exit(Token::CodeText);
- State::Ok(0)
+ State::Ok
}
_ => {
let index = tokenizer.events.len();
diff --git a/src/construct/definition.rs b/src/construct/definition.rs
index 766bd8a..093b9e5 100644
--- a/src/construct/definition.rs
+++ b/src/construct/definition.rs
@@ -237,7 +237,7 @@ fn after_whitespace(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.exit(Token::Definition);
// You’d be interrupting.
tokenizer.interrupt = true;
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
@@ -297,7 +297,7 @@ fn title_after(tokenizer: &mut Tokenizer, code: Code) -> State {
/// ```
fn title_after_after_optional_whitespace(_tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
- Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => State::Ok(0),
+ Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => State::Ok,
_ => State::Nok,
}
}
diff --git a/src/construct/hard_break_escape.rs b/src/construct/hard_break_escape.rs
index 2ac693e..85a64f7 100644
--- a/src/construct/hard_break_escape.rs
+++ b/src/construct/hard_break_escape.rs
@@ -74,7 +74,7 @@ fn inside(tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
tokenizer.exit(Token::HardBreakEscape);
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
diff --git a/src/construct/hard_break_trailing.rs b/src/construct/hard_break_trailing.rs
index 35097ec..1d1f10f 100644
--- a/src/construct/hard_break_trailing.rs
+++ b/src/construct/hard_break_trailing.rs
@@ -81,7 +81,7 @@ fn inside(tokenizer: &mut Tokenizer, code: Code, size: usize) -> State {
{
tokenizer.exit(Token::HardBreakTrailingSpace);
tokenizer.exit(Token::HardBreakTrailing);
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index 4ef1192..65022d5 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -134,7 +134,7 @@ fn at_break(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.register_resolver("heading_atx".to_string(), Box::new(resolve));
// Feel free to interrupt.
tokenizer.interrupt = false;
- State::Ok(0)
+ State::Ok
}
Code::VirtualSpace | Code::Char('\t' | ' ') => {
tokenizer.go(space_or_tab(), at_break)(tokenizer, code)
diff --git a/src/construct/heading_setext.rs b/src/construct/heading_setext.rs
index 83c41e2..7ac2eb6 100644
--- a/src/construct/heading_setext.rs
+++ b/src/construct/heading_setext.rs
@@ -189,7 +189,7 @@ fn after(tokenizer: &mut Tokenizer, code: Code) -> State {
// Feel free to interrupt.
tokenizer.interrupt = false;
tokenizer.register_resolver("heading_setext".to_string(), Box::new(resolve));
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
diff --git a/src/construct/html_flow.rs b/src/construct/html_flow.rs
index add2308..30c14a1 100644
--- a/src/construct/html_flow.rs
+++ b/src/construct/html_flow.rs
@@ -930,7 +930,7 @@ fn continuation_after(tokenizer: &mut Tokenizer, _code: Code) -> State {
tokenizer.interrupt = false;
// No longer concrete.
tokenizer.concrete = false;
- State::Ok(0)
+ State::Ok
}
/// Before a line ending, expecting a blank line.
diff --git a/src/construct/html_text.rs b/src/construct/html_text.rs
index f1ed5c7..8fbe862 100644
--- a/src/construct/html_text.rs
+++ b/src/construct/html_text.rs
@@ -612,7 +612,7 @@ fn end(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.consume(code);
tokenizer.exit(Token::HtmlTextData);
tokenizer.exit(Token::HtmlText);
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
diff --git a/src/construct/label_end.rs b/src/construct/label_end.rs
index 13af833..960e95e 100644
--- a/src/construct/label_end.rs
+++ b/src/construct/label_end.rs
@@ -345,7 +345,7 @@ fn ok(tokenizer: &mut Tokenizer, _code: Code, mut info: Info) -> State {
info.media.end.1 = tokenizer.events.len() - 1;
tokenizer.media_list.push(info.media);
tokenizer.register_resolver_before("media".to_string(), Box::new(resolve_media));
- State::Ok(0)
+ State::Ok
}
/// Done, it’s nothing.
@@ -488,7 +488,7 @@ fn resource_end(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.consume(code);
tokenizer.exit(Token::ResourceMarker);
tokenizer.exit(Token::Resource);
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
@@ -558,7 +558,7 @@ fn full_reference_after(tokenizer: &mut Tokenizer, _code: Code) -> State {
false,
)))
{
- State::Ok(0)
+ State::Ok
} else {
State::Nok
}
@@ -600,7 +600,7 @@ fn collapsed_reference_open(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.consume(code);
tokenizer.exit(Token::ReferenceMarker);
tokenizer.exit(Token::Reference);
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
diff --git a/src/construct/label_start_image.rs b/src/construct/label_start_image.rs
index 6738ebe..54521d4 100644
--- a/src/construct/label_start_image.rs
+++ b/src/construct/label_start_image.rs
@@ -71,7 +71,7 @@ pub fn open(tokenizer: &mut Tokenizer, code: Code) -> State {
inactive: false,
});
tokenizer.register_resolver_before("media".to_string(), Box::new(resolve_media));
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
diff --git a/src/construct/label_start_link.rs b/src/construct/label_start_link.rs
index 9462ba7..f4c2ce8 100644
--- a/src/construct/label_start_link.rs
+++ b/src/construct/label_start_link.rs
@@ -52,7 +52,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> State {
inactive: false,
});
tokenizer.register_resolver_before("media".to_string(), Box::new(resolve_media));
- State::Ok(0)
+ State::Ok
}
_ => State::Nok,
}
diff --git a/src/construct/list.rs b/src/construct/list.rs
index ae3fc34..4dfd202 100644
--- a/src/construct/list.rs
+++ b/src/construct/list.rs
@@ -276,7 +276,7 @@ fn whitespace_after(_tokenizer: &mut Tokenizer, code: Code) -> State {
if matches!(code, Code::VirtualSpace | Code::Char('\t' | ' ')) {
State::Nok
} else {
- State::Ok(0)
+ State::Ok
}
}
@@ -322,7 +322,7 @@ fn after(tokenizer: &mut Tokenizer, _code: Code, blank: bool) -> State {
tokenizer.exit(Token::ListItemPrefix);
tokenizer.register_resolver_before("list_item".to_string(), Box::new(resolve_list_item));
- State::Ok(0)
+ State::Ok
}
}
@@ -378,7 +378,7 @@ pub fn not_blank_cont(tokenizer: &mut Tokenizer, code: Code) -> State {
/// A state fn to yield [`State::Ok`].
pub fn ok(_tokenizer: &mut Tokenizer, _code: Code) -> State {
- State::Ok(0)
+ State::Ok
}
/// A state fn to yield [`State::Nok`].
diff --git a/src/construct/paragraph.rs b/src/construct/paragraph.rs
index bc980b2..d320779 100644
--- a/src/construct/paragraph.rs
+++ b/src/construct/paragraph.rs
@@ -69,7 +69,7 @@ fn inside(tokenizer: &mut Tokenizer, code: Code) -> State {
tokenizer.register_resolver_before("paragraph".to_string(), Box::new(resolve));
// You’d be interrupting.
tokenizer.interrupt = true;
- State::Ok(0)
+ State::Ok
}
_ => {
tokenizer.consume(code);
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index ce10763..f399bac 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -33,7 +33,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code, stop: &'static [Code]) -> St
/// ```
fn at_break(tokenizer: &mut Tokenizer, code: Code, stop: &'static [Code]) -> State {
match code {
- Code::None => State::Ok(0),
+ Code::None => State::Ok,
Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
tokenizer.enter(Token::LineEnding);
tokenizer.consume(code);
@@ -42,7 +42,7 @@ fn at_break(tokenizer: &mut Tokenizer, code: Code, stop: &'static [Code]) -> Sta
}
_ if stop.contains(&code) => {
tokenizer.register_resolver("data".to_string(), Box::new(resolve_data));
- State::Ok(0)
+ State::Ok
}
_ => {
tokenizer.enter(Token::Data);
diff --git a/src/construct/partial_destination.rs b/src/construct/partial_destination.rs
index 4a43ec2..d567876 100644
--- a/src/construct/partial_destination.rs
+++ b/src/construct/partial_destination.rs
@@ -152,7 +152,7 @@ fn enclosed_before(tokenizer: &mut Tokenizer, code: Code, info: Info) -> State {
tokenizer.exit(info.options.marker.clone());
tokenizer.exit(info.options.literal.clone());
tokenizer.exit(info.options.destination);
- State::Ok(0)
+ State::Ok
} else {
tokenizer.enter(info.options.string.clone());
tokenizer.enter_with_content(Token::Data, Some(ContentType::String));
@@ -224,7 +224,7 @@ fn raw(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> State {
tokenizer.exit(info.options.string.clone());
tokenizer.exit(info.options.raw.clone());
tokenizer.exit(info.options.destination);
- State::Ok(0)
+ State::Ok
} else {
tokenizer.consume(code);
info.balance -= 1;
@@ -242,7 +242,7 @@ fn raw(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> State {
tokenizer.exit(info.options.string.clone());
tokenizer.exit(info.options.raw.clone());
tokenizer.exit(info.options.destination);
- State::Ok(0)
+ State::Ok
}
}
Code::Char(char) if char.is_ascii_control() => State::Nok,
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index d2219cd..3bb3abf 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -133,7 +133,7 @@ fn at_break(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> State {
tokenizer.consume(code);
tokenizer.exit(info.options.marker.clone());
tokenizer.exit(info.options.label);
- State::Ok(0)
+ State::Ok
}
Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => tokenizer.go(
space_or_tab_eol_with_options(EolOptions {
diff --git a/src/construct/partial_non_lazy_continuation.rs b/src/construct/partial_non_lazy_continuation.rs
index 62e8989..5835848 100644
--- a/src/construct/partial_non_lazy_continuation.rs
+++ b/src/construct/partial_non_lazy_continuation.rs
@@ -43,6 +43,6 @@ fn after(tokenizer: &mut Tokenizer, _code: Code) -> State {
if tokenizer.lazy {
State::Nok
} else {
- State::Ok(0)
+ State::Ok
}
}
diff --git a/src/construct/partial_space_or_tab.rs b/src/construct/partial_space_or_tab.rs
index f13414a..30c63e0 100644
--- a/src/construct/partial_space_or_tab.rs
+++ b/src/construct/partial_space_or_tab.rs
@@ -149,7 +149,7 @@ fn start(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> State {
}
_ => {
if info.options.min == 0 {
- State::Ok(0)
+ State::Ok
} else {
State::Nok
}
@@ -173,7 +173,7 @@ fn inside(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> State {
_ => {
tokenizer.exit(info.options.kind.clone());
if info.size >= info.options.min {
- State::Ok(0)
+ State::Ok
} else {
State::Nok
}
@@ -204,7 +204,7 @@ fn after_space_or_tab(tokenizer: &mut Tokenizer, code: Code, mut info: EolInfo)
tokenizer.exit(Token::LineEnding);
State::Fn(Box::new(|t, c| after_eol(t, c, info)))
}
- _ if info.ok => State::Ok(0),
+ _ if info.ok => State::Ok,
_ => State::Nok,
}
}
@@ -245,6 +245,6 @@ fn after_more_space_or_tab(_tokenizer: &mut Tokenizer, code: Code) -> State {
) {
State::Nok
} else {
- State::Ok(0)
+ State::Ok
}
}
diff --git a/src/construct/partial_title.rs b/src/construct/partial_title.rs
index 8510391..0483681 100644
--- a/src/construct/partial_title.rs
+++ b/src/construct/partial_title.rs
@@ -168,7 +168,7 @@ fn begin(tokenizer: &mut Tokenizer, code: Code, info: Info) -> State {
tokenizer.consume(code);
tokenizer.exit(info.options.marker.clone());
tokenizer.exit(info.options.title);
- State::Ok(0)
+ State::Ok
}
_ => {
tokenizer.enter(info.options.string.clone());
diff --git a/src/construct/partial_whitespace.rs b/src/construct/partial_whitespace.rs
index acdd4d1..023c52f 100644
--- a/src/construct/partial_whitespace.rs
+++ b/src/construct/partial_whitespace.rs
@@ -58,5 +58,5 @@ fn at_eol(tokenizer: &mut Tokenizer, code: Code) -> State {
/// Fine.
fn ok(_tokenizer: &mut Tokenizer, _code: Code) -> State {
- State::Ok(0)
+ State::Ok
}
diff --git a/src/construct/thematic_break.rs b/src/construct/thematic_break.rs
index 66edaf8..3c9085a 100644
--- a/src/construct/thematic_break.rs
+++ b/src/construct/thematic_break.rs
@@ -183,7 +183,7 @@ fn at_break(tokenizer: &mut Tokenizer, code: Code, info: Info) -> State {
tokenizer.exit(Token::ThematicBreak);
// Feel free to interrupt.
tokenizer.interrupt = false;
- State::Ok(0)
+ State::Ok
}
Code::Char(char) if char == info.kind.as_char() => {
tokenizer.enter(Token::ThematicBreakSequence);
diff --git a/src/content/document.rs b/src/content/document.rs
index 29aaa84..d3159d3 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -383,9 +383,7 @@ fn flow_end(tokenizer: &mut Tokenizer, code: Code, mut info: DocumentInfo, resul
info.interrupt_before = tokenizer.interrupt;
match result {
- State::Ok(back) => {
- assert_eq!(back, 0);
-
+ State::Ok => {
if !info.stack.is_empty() {
info.continued = 0;
info = exit_containers(tokenizer, info, &Phase::Eof);
@@ -417,7 +415,7 @@ fn exit_containers(
let next = info.next;
info.next = Box::new(flow); // This is weird but Rust needs a function there.
let result = tokenizer.flush(next);
- assert!(matches!(result, State::Ok(0)));
+ assert!(matches!(result, State::Ok));
if *phase == Phase::Prefix {
info.index = tokenizer.events.len();
diff --git a/src/content/flow.rs b/src/content/flow.rs
index 8aed92b..2b2ee6d 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -41,7 +41,7 @@ use crate::tokenizer::{Code, State, Tokenizer};
/// ```
pub fn start(tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
- Code::None => State::Ok(0),
+ Code::None => State::Ok,
_ => tokenizer.attempt(blank_line, |ok| {
Box::new(if ok { blank_line_after } else { initial_before })
})(tokenizer, code),
@@ -62,7 +62,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> State {
/// ```
fn initial_before(tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
- Code::None => State::Ok(0),
+ Code::None => State::Ok,
_ => tokenizer.attempt_n(
vec![
Box::new(code_indented),
@@ -87,7 +87,7 @@ fn initial_before(tokenizer: &mut Tokenizer, code: Code) -> State {
/// ```
fn blank_line_after(tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
- Code::None => State::Ok(0),
+ Code::None => State::Ok,
Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
tokenizer.enter(Token::BlankLineEnding);
tokenizer.consume(code);
@@ -111,7 +111,7 @@ fn blank_line_after(tokenizer: &mut Tokenizer, code: Code) -> State {
/// ```
fn after(tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
- Code::None => State::Ok(0),
+ Code::None => State::Ok,
Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
tokenizer.enter(Token::LineEnding);
tokenizer.consume(code);
diff --git a/src/content/string.rs b/src/content/string.rs
index fad2b6a..5a3149a 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -29,7 +29,7 @@ const MARKERS: [Code; 5] = [
/// Before string.
pub fn start(tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
- Code::None => State::Ok(0),
+ Code::None => State::Ok,
_ => tokenizer.attempt_n(
vec![
Box::new(character_reference),
diff --git a/src/content/text.rs b/src/content/text.rs
index 0d90cb4..4e49a3d 100644
--- a/src/content/text.rs
+++ b/src/content/text.rs
@@ -47,7 +47,7 @@ const MARKERS: [Code; 12] = [
/// Before text.
pub fn start(tokenizer: &mut Tokenizer, code: Code) -> State {
match code {
- Code::None => State::Ok(0),
+ Code::None => State::Ok,
_ => tokenizer.attempt_n(
vec![
Box::new(attention),
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 637b34f..d3a40d3 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -102,7 +102,7 @@ pub enum State {
/// There is a future state: a boxed [`StateFn`][] to pass the next code to.
Fn(Box<StateFn>),
/// The state is successful.
- Ok(usize),
+ Ok,
/// The state is not successful.
Nok,
}
@@ -490,7 +490,7 @@ impl<'a> Tokenizer<'a> {
None,
self.index,
|result: (usize, usize), tokenizer: &mut Tokenizer, state| {
- if matches!(state, State::Ok(_)) {
+ if matches!(state, State::Ok) {
tokenizer.index = result.1;
tokenizer.consumed = true;
State::Fn(Box::new(after))
@@ -546,7 +546,7 @@ impl<'a> Tokenizer<'a> {
tokenizer.free(previous);
tokenizer.index = result.0;
tokenizer.consumed = true;
- State::Fn(done(matches!(state, State::Ok(_))))
+ State::Fn(done(matches!(state, State::Ok)))
},
)
}
@@ -574,7 +574,7 @@ impl<'a> Tokenizer<'a> {
None,
self.index,
|result: (usize, usize), tokenizer: &mut Tokenizer, state| {
- let ok = matches!(state, State::Ok(_));
+ let ok = matches!(state, State::Ok);
if !ok {
tokenizer.free(previous);
@@ -685,8 +685,8 @@ fn attempt_impl(
let state = state(tokenizer, code);
match state {
- State::Ok(back) => {
- let stop = tokenizer.index - back;
+ State::Ok => {
+ let stop = tokenizer.index;
assert!(
stop >= start,
"`back` must not result in an index smaller than `start`"
@@ -715,7 +715,7 @@ fn feed_impl(
let code = tokenizer.parse_state.codes[tokenizer.index];
match state {
- State::Ok(_) | State::Nok => {
+ State::Ok | State::Nok => {
break;
}
State::Fn(func) => {
@@ -740,7 +740,7 @@ fn flush_impl(
loop {
match state {
- State::Ok(_) | State::Nok => break,
+ State::Ok | State::Nok => break,
State::Fn(func) => {
let code = if tokenizer.index < max {
tokenizer.parse_state.codes[tokenizer.index]
@@ -755,7 +755,7 @@ fn flush_impl(
}
match state {
- State::Ok(back) => assert_eq!(back, 0, "expected final `back` to be `0`"),
+ State::Ok => {}
_ => unreachable!("expected final state to be `State::Ok`"),
}