author     Titus Wormer <tituswormer@gmail.com>  2022-08-11 13:45:24 +0200
committer  Titus Wormer <tituswormer@gmail.com>  2022-08-11 13:45:24 +0200
commit     6eb2f644057f371841fe25330a57ee185f91c7af (patch)
tree       7b4d02586339d1a7f82104b4473d9ac243b3abf9 /src/construct
parent     2d35cbfceace81a217cd0fbdae7a8777c7a6465e (diff)
Refactor to move some code to `state.rs`
Diffstat (limited to 'src/construct')
-rw-r--r--  src/construct/attention.rs                        7
-rw-r--r--  src/construct/autolink.rs                        37
-rw-r--r--  src/construct/blank_line.rs                       7
-rw-r--r--  src/construct/block_quote.rs                     17
-rw-r--r--  src/construct/character_escape.rs                 7
-rw-r--r--  src/construct/character_reference.rs             19
-rw-r--r--  src/construct/code_fenced.rs                     71
-rw-r--r--  src/construct/code_indented.rs                   35
-rw-r--r--  src/construct/code_text.rs                       23
-rw-r--r--  src/construct/definition.rs                      43
-rw-r--r--  src/construct/hard_break_escape.rs                5
-rw-r--r--  src/construct/heading_atx.rs                     29
-rw-r--r--  src/construct/heading_setext.rs                  17
-rw-r--r--  src/construct/html_flow.rs                      151
-rw-r--r--  src/construct/html_text.rs                      155
-rw-r--r--  src/construct/label_end.rs                       67
-rw-r--r--  src/construct/label_start_image.rs                5
-rw-r--r--  src/construct/label_start_link.rs                 3
-rw-r--r--  src/construct/list.rs                            57
-rw-r--r--  src/construct/paragraph.rs                        7
-rw-r--r--  src/construct/partial_bom.rs                      7
-rw-r--r--  src/construct/partial_data.rs                    15
-rw-r--r--  src/construct/partial_destination.rs             31
-rw-r--r--  src/construct/partial_label.rs                   25
-rw-r--r--  src/construct/partial_non_lazy_continuation.rs    5
-rw-r--r--  src/construct/partial_space_or_tab.rs            37
-rw-r--r--  src/construct/partial_title.rs                   29
-rw-r--r--  src/construct/thematic_break.rs                  19
28 files changed, 468 insertions, 462 deletions
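
The change is mechanical throughout: `State` and `StateName` move out of `tokenizer.rs` into a new `state.rs` module, `StateName` is renamed to `Name`, and every construct swaps `use crate::tokenizer::{State, StateName, Tokenizer}` for `use crate::state::{Name, State}` plus `use crate::tokenizer::Tokenizer`, rewriting each `StateName::X` to `Name::X`. A minimal sketch of what the relocated module plausibly looks like, inferred from the imports and variants used in the diffs below (variant list abbreviated, derives assumed; not the actual contents of `state.rs`):

```rust
//! Hypothetical sketch of the new `src/state.rs`, inferred from usage in
//! the diffs below; the real module defines one `Name` variant per state
//! function and may differ in details.

/// What a state function asks the tokenizer to do next.
pub enum State {
    /// Continue in the named state after consuming the current byte.
    Next(Name),
    /// Continue in the named state without consuming (re-handle the byte).
    Retry(Name),
    /// The construct matched.
    Ok,
    /// The construct did not match.
    Nok,
}

/// Names of state functions, formerly `tokenizer::StateName`. Passing a
/// `Name` instead of a function pointer lets a state be stored, compared,
/// and resumed later.
#[derive(Clone, Copy, Debug)]
pub enum Name {
    AttentionInside,
    AutolinkOpen,
    BlankLineStart,
    BlankLineAfter,
    // ...one variant per state function in `src/construct`.
}
```

Wrapping the continuation in `State::Next(Name::X)` rather than calling the function directly is what lets `tokenizer.attempt(...)` and `tokenizer.check(...)` in the diffs below register two possible continuations (success and failure) before running a sub-state machine.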
diff --git a/src/construct/attention.rs b/src/construct/attention.rs
index d61813d..7e873ca 100644
--- a/src/construct/attention.rs
+++ b/src/construct/attention.rs
@@ -51,8 +51,9 @@
//! [html-em]: https://html.spec.whatwg.org/multipage/text-level-semantics.html#the-em-element
//! [html-strong]: https://html.spec.whatwg.org/multipage/text-level-semantics.html#the-strong-element
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{Event, EventType, Point, State, StateName, Tokenizer};
+use crate::tokenizer::{Event, EventType, Point, Tokenizer};
use crate::unicode::PUNCTUATION;
use crate::util::slice::Slice;
@@ -120,7 +121,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
Some(b'*' | b'_') if tokenizer.parse_state.constructs.attention => {
tokenizer.tokenize_state.marker = tokenizer.current.unwrap();
tokenizer.enter(Token::AttentionSequence);
- State::Retry(StateName::AttentionInside)
+ State::Retry(Name::AttentionInside)
}
_ => State::Nok,
}
@@ -136,7 +137,7 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'*' | b'_') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker => {
tokenizer.consume();
- State::Next(StateName::AttentionInside)
+ State::Next(Name::AttentionInside)
}
_ => {
tokenizer.exit(Token::AttentionSequence);
diff --git a/src/construct/autolink.rs b/src/construct/autolink.rs
index eef3840..b635d96 100644
--- a/src/construct/autolink.rs
+++ b/src/construct/autolink.rs
@@ -102,8 +102,9 @@
//! [html-a]: https://html.spec.whatwg.org/multipage/text-level-semantics.html#the-a-element
use crate::constant::{AUTOLINK_DOMAIN_SIZE_MAX, AUTOLINK_SCHEME_SIZE_MAX};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of an autolink.
///
@@ -121,7 +122,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.consume();
tokenizer.exit(Token::AutolinkMarker);
tokenizer.enter(Token::AutolinkProtocol);
- State::Next(StateName::AutolinkOpen)
+ State::Next(Name::AutolinkOpen)
}
_ => State::Nok,
}
@@ -140,9 +141,9 @@ pub fn open(tokenizer: &mut Tokenizer) -> State {
// ASCII alphabetic.
Some(b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::AutolinkSchemeOrEmailAtext)
+ State::Next(Name::AutolinkSchemeOrEmailAtext)
}
- _ => State::Retry(StateName::AutolinkEmailAtext),
+ _ => State::Retry(Name::AutolinkEmailAtext),
}
}
@@ -160,9 +161,9 @@ pub fn scheme_or_email_atext(tokenizer: &mut Tokenizer) -> State {
Some(b'+' | b'-' | b'.' | b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z') => {
// Count the previous alphabetical from `open` too.
tokenizer.tokenize_state.size = 1;
- State::Retry(StateName::AutolinkSchemeInsideOrEmailAtext)
+ State::Retry(Name::AutolinkSchemeInsideOrEmailAtext)
}
- _ => State::Retry(StateName::AutolinkEmailAtext),
+ _ => State::Retry(Name::AutolinkEmailAtext),
}
}
@@ -179,7 +180,7 @@ pub fn scheme_inside_or_email_atext(tokenizer: &mut Tokenizer) -> State {
Some(b':') => {
tokenizer.consume();
tokenizer.tokenize_state.size = 0;
- State::Next(StateName::AutolinkUrlInside)
+ State::Next(Name::AutolinkUrlInside)
}
// ASCII alphanumeric and `+`, `-`, and `.`.
Some(b'+' | b'-' | b'.' | b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z')
@@ -187,11 +188,11 @@ pub fn scheme_inside_or_email_atext(tokenizer: &mut Tokenizer) -> State {
{
tokenizer.tokenize_state.size += 1;
tokenizer.consume();
- State::Next(StateName::AutolinkSchemeInsideOrEmailAtext)
+ State::Next(Name::AutolinkSchemeInsideOrEmailAtext)
}
_ => {
tokenizer.tokenize_state.size = 0;
- State::Retry(StateName::AutolinkEmailAtext)
+ State::Retry(Name::AutolinkEmailAtext)
}
}
}
@@ -216,7 +217,7 @@ pub fn url_inside(tokenizer: &mut Tokenizer) -> State {
None | Some(b'\0'..=0x1F | b' ' | b'<' | 0x7F) => State::Nok,
Some(_) => {
tokenizer.consume();
- State::Next(StateName::AutolinkUrlInside)
+ State::Next(Name::AutolinkUrlInside)
}
}
}
@@ -231,7 +232,7 @@ pub fn email_atext(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'@') => {
tokenizer.consume();
- State::Next(StateName::AutolinkEmailAtSignOrDot)
+ State::Next(Name::AutolinkEmailAtSignOrDot)
}
// ASCII atext.
//
@@ -254,7 +255,7 @@ pub fn email_atext(tokenizer: &mut Tokenizer) -> State {
b'#'..=b'\'' | b'*' | b'+' | b'-'..=b'9' | b'=' | b'?' | b'A'..=b'Z' | b'^'..=b'~',
) => {
tokenizer.consume();
- State::Next(StateName::AutolinkEmailAtext)
+ State::Next(Name::AutolinkEmailAtext)
}
_ => State::Nok,
}
@@ -269,9 +270,7 @@ pub fn email_atext(tokenizer: &mut Tokenizer) -> State {
pub fn email_at_sign_or_dot(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
// ASCII alphanumeric.
- Some(b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z') => {
- State::Retry(StateName::AutolinkEmailValue)
- }
+ Some(b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z') => State::Retry(Name::AutolinkEmailValue),
_ => State::Nok,
}
}
@@ -287,7 +286,7 @@ pub fn email_label(tokenizer: &mut Tokenizer) -> State {
Some(b'.') => {
tokenizer.tokenize_state.size = 0;
tokenizer.consume();
- State::Next(StateName::AutolinkEmailAtSignOrDot)
+ State::Next(Name::AutolinkEmailAtSignOrDot)
}
Some(b'>') => {
tokenizer.tokenize_state.size = 0;
@@ -302,7 +301,7 @@ pub fn email_label(tokenizer: &mut Tokenizer) -> State {
tokenizer.exit(Token::Autolink);
State::Ok
}
- _ => State::Retry(StateName::AutolinkEmailValue),
+ _ => State::Retry(Name::AutolinkEmailValue),
}
}
@@ -321,9 +320,9 @@ pub fn email_value(tokenizer: &mut Tokenizer) -> State {
if tokenizer.tokenize_state.size < AUTOLINK_DOMAIN_SIZE_MAX =>
{
let name = if matches!(tokenizer.current, Some(b'-')) {
- StateName::AutolinkEmailValue
+ Name::AutolinkEmailValue
} else {
- StateName::AutolinkEmailLabel
+ Name::AutolinkEmailLabel
};
tokenizer.tokenize_state.size += 1;
tokenizer.consume();
diff --git a/src/construct/blank_line.rs b/src/construct/blank_line.rs
index d7d4817..e8a06e9 100644
--- a/src/construct/blank_line.rs
+++ b/src/construct/blank_line.rs
@@ -33,7 +33,8 @@
//! [flow]: crate::content::flow
use crate::construct::partial_space_or_tab::space_or_tab;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::state::{Name, State};
+use crate::tokenizer::Tokenizer;
/// Start of a blank line.
///
@@ -49,8 +50,8 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::BlankLineAfter),
- State::Next(StateName::BlankLineAfter),
+ State::Next(Name::BlankLineAfter),
+ State::Next(Name::BlankLineAfter),
)
}
diff --git a/src/construct/block_quote.rs b/src/construct/block_quote.rs
index bbfad5b..7b8ce82 100644
--- a/src/construct/block_quote.rs
+++ b/src/construct/block_quote.rs
@@ -35,8 +35,9 @@
use crate::constant::TAB_SIZE;
use crate::construct::partial_space_or_tab::space_or_tab_min_max;
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of block quote.
///
@@ -55,7 +56,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.attempt(name, State::Next(StateName::BlockQuoteBefore), State::Nok)
+ tokenizer.attempt(name, State::Next(Name::BlockQuoteBefore), State::Nok)
} else {
State::Nok
}
@@ -71,9 +72,9 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'>') => {
tokenizer.enter(Token::BlockQuote);
- State::Retry(StateName::BlockQuoteContBefore)
+ State::Retry(Name::BlockQuoteContBefore)
}
- _ => State::Retry(StateName::BlockQuoteContBefore),
+ _ => State::Retry(Name::BlockQuoteContBefore),
}
}
@@ -94,11 +95,7 @@ pub fn cont_start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.attempt(
- name,
- State::Next(StateName::BlockQuoteContBefore),
- State::Nok,
- )
+ tokenizer.attempt(name, State::Next(Name::BlockQuoteContBefore), State::Nok)
}
/// After whitespace, before `>`.
@@ -115,7 +112,7 @@ pub fn cont_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::BlockQuoteMarker);
tokenizer.consume();
tokenizer.exit(Token::BlockQuoteMarker);
- State::Next(StateName::BlockQuoteContAfter)
+ State::Next(Name::BlockQuoteContAfter)
}
_ => State::Nok,
}
diff --git a/src/construct/character_escape.rs b/src/construct/character_escape.rs
index 52b2873..c3d5458 100644
--- a/src/construct/character_escape.rs
+++ b/src/construct/character_escape.rs
@@ -33,8 +33,9 @@
//! [character_reference]: crate::construct::character_reference
//! [hard_break_escape]: crate::construct::hard_break_escape
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of a character escape.
///
@@ -49,7 +50,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::CharacterEscapeMarker);
tokenizer.consume();
tokenizer.exit(Token::CharacterEscapeMarker);
- State::Next(StateName::CharacterEscapeInside)
+ State::Next(Name::CharacterEscapeInside)
}
_ => State::Nok,
}
@@ -61,7 +62,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
/// > | a\*b
/// ^
/// ```
-// StateName::CharacterEscapeInside
+// Name::CharacterEscapeInside
pub fn inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
// ASCII punctuation.
diff --git a/src/construct/character_reference.rs b/src/construct/character_reference.rs
index e1c7e79..435c115 100644
--- a/src/construct/character_reference.rs
+++ b/src/construct/character_reference.rs
@@ -65,8 +65,9 @@ use crate::constant::{
CHARACTER_REFERENCES, CHARACTER_REFERENCE_DECIMAL_SIZE_MAX,
CHARACTER_REFERENCE_HEXADECIMAL_SIZE_MAX, CHARACTER_REFERENCE_NAMED_SIZE_MAX,
};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
use crate::util::slice::Slice;
/// Start of a character reference.
@@ -86,7 +87,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::CharacterReferenceMarker);
tokenizer.consume();
tokenizer.exit(Token::CharacterReferenceMarker);
- State::Next(StateName::CharacterReferenceOpen)
+ State::Next(Name::CharacterReferenceOpen)
}
_ => State::Nok,
}
@@ -103,17 +104,17 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
/// > | a&#x9;b
/// ^
/// ```
-// StateName::CharacterReferenceOpen
+// Name::CharacterReferenceOpen
pub fn open(tokenizer: &mut Tokenizer) -> State {
if let Some(b'#') = tokenizer.current {
tokenizer.enter(Token::CharacterReferenceMarkerNumeric);
tokenizer.consume();
tokenizer.exit(Token::CharacterReferenceMarkerNumeric);
- State::Next(StateName::CharacterReferenceNumeric)
+ State::Next(Name::CharacterReferenceNumeric)
} else {
tokenizer.tokenize_state.marker = b'&';
tokenizer.enter(Token::CharacterReferenceValue);
- State::Retry(StateName::CharacterReferenceValue)
+ State::Retry(Name::CharacterReferenceValue)
}
}
@@ -126,7 +127,7 @@ pub fn open(tokenizer: &mut Tokenizer) -> State {
/// > | a&#x9;b
/// ^
/// ```
-// StateName::CharacterReferenceNumeric
+// Name::CharacterReferenceNumeric
pub fn numeric(tokenizer: &mut Tokenizer) -> State {
if let Some(b'x' | b'X') = tokenizer.current {
tokenizer.enter(Token::CharacterReferenceMarkerHexadecimal);
@@ -134,11 +135,11 @@ pub fn numeric(tokenizer: &mut Tokenizer) -> State {
tokenizer.exit(Token::CharacterReferenceMarkerHexadecimal);
tokenizer.enter(Token::CharacterReferenceValue);
tokenizer.tokenize_state.marker = b'x';
- State::Next(StateName::CharacterReferenceValue)
+ State::Next(Name::CharacterReferenceValue)
} else {
tokenizer.enter(Token::CharacterReferenceValue);
tokenizer.tokenize_state.marker = b'#';
- State::Retry(StateName::CharacterReferenceValue)
+ State::Retry(Name::CharacterReferenceValue)
}
}
@@ -202,7 +203,7 @@ pub fn value(tokenizer: &mut Tokenizer) -> State {
if tokenizer.tokenize_state.size < max && test(&byte) {
tokenizer.tokenize_state.size += 1;
tokenizer.consume();
- return State::Next(StateName::CharacterReferenceValue);
+ return State::Next(Name::CharacterReferenceValue);
}
}
diff --git a/src/construct/code_fenced.rs b/src/construct/code_fenced.rs
index 26e1148..0ce8d02 100644
--- a/src/construct/code_fenced.rs
+++ b/src/construct/code_fenced.rs
@@ -103,8 +103,9 @@
use crate::constant::{CODE_FENCED_SEQUENCE_SIZE_MIN, TAB_SIZE};
use crate::construct::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{ContentType, State, StateName, Tokenizer};
+use crate::tokenizer::{ContentType, Tokenizer};
use crate::util::slice::{Position, Slice};
/// Start of fenced code.
@@ -130,7 +131,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
);
tokenizer.attempt(
name,
- State::Next(StateName::CodeFencedBeforeSequenceOpen),
+ State::Next(Name::CodeFencedBeforeSequenceOpen),
State::Nok,
)
} else {
@@ -164,7 +165,7 @@ pub fn before_sequence_open(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.marker = tokenizer.current.unwrap();
tokenizer.tokenize_state.size_c = prefix;
tokenizer.enter(Token::CodeFencedFenceSequence);
- State::Retry(StateName::CodeFencedSequenceOpen)
+ State::Retry(Name::CodeFencedSequenceOpen)
} else {
State::Nok
}
@@ -183,15 +184,15 @@ pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
Some(b'`' | b'~') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker => {
tokenizer.tokenize_state.size += 1;
tokenizer.consume();
- State::Next(StateName::CodeFencedSequenceOpen)
+ State::Next(Name::CodeFencedSequenceOpen)
}
_ if tokenizer.tokenize_state.size >= CODE_FENCED_SEQUENCE_SIZE_MIN => {
tokenizer.exit(Token::CodeFencedFenceSequence);
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::CodeFencedInfoBefore),
- State::Next(StateName::CodeFencedInfoBefore),
+ State::Next(Name::CodeFencedInfoBefore),
+ State::Next(Name::CodeFencedInfoBefore),
)
}
_ => {
@@ -218,15 +219,15 @@ pub fn info_before(tokenizer: &mut Tokenizer) -> State {
// Do not form containers.
tokenizer.concrete = true;
tokenizer.check(
- StateName::NonLazyContinuationStart,
- State::Next(StateName::CodeFencedAtNonLazyBreak),
- State::Next(StateName::CodeFencedAfter),
+ Name::NonLazyContinuationStart,
+ State::Next(Name::CodeFencedAtNonLazyBreak),
+ State::Next(Name::CodeFencedAfter),
)
}
_ => {
tokenizer.enter(Token::CodeFencedFenceInfo);
tokenizer.enter_with_content(Token::Data, Some(ContentType::String));
- State::Retry(StateName::CodeFencedInfo)
+ State::Retry(Name::CodeFencedInfo)
}
}
}
@@ -244,7 +245,7 @@ pub fn info(tokenizer: &mut Tokenizer) -> State {
None | Some(b'\n') => {
tokenizer.exit(Token::Data);
tokenizer.exit(Token::CodeFencedFenceInfo);
- State::Retry(StateName::CodeFencedInfoBefore)
+ State::Retry(Name::CodeFencedInfoBefore)
}
Some(b'\t' | b' ') => {
tokenizer.exit(Token::Data);
@@ -252,8 +253,8 @@ pub fn info(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::CodeFencedMetaBefore),
- State::Next(StateName::CodeFencedMetaBefore),
+ State::Next(Name::CodeFencedMetaBefore),
+ State::Next(Name::CodeFencedMetaBefore),
)
}
Some(b'`') if tokenizer.tokenize_state.marker == b'`' => {
@@ -265,7 +266,7 @@ pub fn info(tokenizer: &mut Tokenizer) -> State {
}
Some(_) => {
tokenizer.consume();
- State::Next(StateName::CodeFencedInfo)
+ State::Next(Name::CodeFencedInfo)
}
}
}
@@ -280,11 +281,11 @@ pub fn info(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn meta_before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- None | Some(b'\n') => State::Retry(StateName::CodeFencedInfoBefore),
+ None | Some(b'\n') => State::Retry(Name::CodeFencedInfoBefore),
_ => {
tokenizer.enter(Token::CodeFencedFenceMeta);
tokenizer.enter_with_content(Token::Data, Some(ContentType::String));
- State::Retry(StateName::CodeFencedMeta)
+ State::Retry(Name::CodeFencedMeta)
}
}
}
@@ -302,7 +303,7 @@ pub fn meta(tokenizer: &mut Tokenizer) -> State {
None | Some(b'\n') => {
tokenizer.exit(Token::Data);
tokenizer.exit(Token::CodeFencedFenceMeta);
- State::Retry(StateName::CodeFencedInfoBefore)
+ State::Retry(Name::CodeFencedInfoBefore)
}
Some(b'`') if tokenizer.tokenize_state.marker == b'`' => {
tokenizer.concrete = false;
@@ -313,7 +314,7 @@ pub fn meta(tokenizer: &mut Tokenizer) -> State {
}
_ => {
tokenizer.consume();
- State::Next(StateName::CodeFencedMeta)
+ State::Next(Name::CodeFencedMeta)
}
}
}
@@ -329,9 +330,9 @@ pub fn meta(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn at_non_lazy_break(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
- StateName::CodeFencedCloseBefore,
- State::Next(StateName::CodeFencedAfter),
- State::Next(StateName::CodeFencedContentBefore),
+ Name::CodeFencedCloseBefore,
+ State::Next(Name::CodeFencedAfter),
+ State::Next(Name::CodeFencedContentBefore),
)
}
@@ -349,7 +350,7 @@ pub fn close_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::CodeFencedCloseStart)
+ State::Next(Name::CodeFencedCloseStart)
}
_ => unreachable!("expected eol"),
}
@@ -376,7 +377,7 @@ pub fn close_start(tokenizer: &mut Tokenizer) -> State {
);
tokenizer.attempt(
name,
- State::Next(StateName::CodeFencedBeforeSequenceClose),
+ State::Next(Name::CodeFencedBeforeSequenceClose),
State::Nok,
)
}
@@ -393,7 +394,7 @@ pub fn before_sequence_close(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'`' | b'~') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker => {
tokenizer.enter(Token::CodeFencedFenceSequence);
- State::Retry(StateName::CodeFencedSequenceClose)
+ State::Retry(Name::CodeFencedSequenceClose)
}
_ => State::Nok,
}
@@ -412,7 +413,7 @@ pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
Some(b'`' | b'~') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker => {
tokenizer.tokenize_state.size_b += 1;
tokenizer.consume();
- State::Next(StateName::CodeFencedSequenceClose)
+ State::Next(Name::CodeFencedSequenceClose)
}
_ if tokenizer.tokenize_state.size_b >= CODE_FENCED_SEQUENCE_SIZE_MIN
&& tokenizer.tokenize_state.size_b >= tokenizer.tokenize_state.size =>
@@ -422,8 +423,8 @@ pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::CodeFencedAfterSequenceClose),
- State::Next(StateName::CodeFencedAfterSequenceClose),
+ State::Next(Name::CodeFencedAfterSequenceClose),
+ State::Next(Name::CodeFencedAfterSequenceClose),
)
}
_ => {
@@ -463,7 +464,7 @@ pub fn content_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::CodeFencedContentStart)
+ State::Next(Name::CodeFencedContentStart)
}
/// Before code content, definitely not before a closing fence.
///
@@ -477,7 +478,7 @@ pub fn content_start(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab_min_max(tokenizer, 0, tokenizer.tokenize_state.size_c);
tokenizer.attempt(
name,
- State::Next(StateName::CodeFencedBeforeContentChunk),
+ State::Next(Name::CodeFencedBeforeContentChunk),
State::Nok,
)
}
@@ -493,13 +494,13 @@ pub fn content_start(tokenizer: &mut Tokenizer) -> State {
pub fn before_content_chunk(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\n') => tokenizer.check(
- StateName::NonLazyContinuationStart,
- State::Next(StateName::CodeFencedAtNonLazyBreak),
- State::Next(StateName::CodeFencedAfter),
+ Name::NonLazyContinuationStart,
+ State::Next(Name::CodeFencedAtNonLazyBreak),
+ State::Next(Name::CodeFencedAfter),
),
_ => {
tokenizer.enter(Token::CodeFlowChunk);
- State::Retry(StateName::CodeFencedContentChunk)
+ State::Retry(Name::CodeFencedContentChunk)
}
}
}
@@ -516,11 +517,11 @@ pub fn content_chunk(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\n') => {
tokenizer.exit(Token::CodeFlowChunk);
- State::Retry(StateName::CodeFencedBeforeContentChunk)
+ State::Retry(Name::CodeFencedBeforeContentChunk)
}
_ => {
tokenizer.consume();
- State::Next(StateName::CodeFencedContentChunk)
+ State::Next(Name::CodeFencedContentChunk)
}
}
}
diff --git a/src/construct/code_indented.rs b/src/construct/code_indented.rs
index 36ae4c6..f442f27 100644
--- a/src/construct/code_indented.rs
+++ b/src/construct/code_indented.rs
@@ -47,8 +47,9 @@
use super::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
use crate::constant::TAB_SIZE;
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of code (indented).
///
@@ -65,11 +66,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
if !tokenizer.interrupt && tokenizer.parse_state.constructs.code_indented {
tokenizer.enter(Token::CodeIndented);
let name = space_or_tab_min_max(tokenizer, TAB_SIZE, TAB_SIZE);
- tokenizer.attempt(
- name,
- State::Next(StateName::CodeIndentedAtBreak),
- State::Nok,
- )
+ tokenizer.attempt(name, State::Next(Name::CodeIndentedAtBreak), State::Nok)
} else {
State::Nok
}
@@ -83,15 +80,15 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn at_break(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- None => State::Retry(StateName::CodeIndentedAfter),
+ None => State::Retry(Name::CodeIndentedAfter),
Some(b'\n') => tokenizer.attempt(
- StateName::CodeIndentedFurtherStart,
- State::Next(StateName::CodeIndentedAtBreak),
- State::Next(StateName::CodeIndentedAfter),
+ Name::CodeIndentedFurtherStart,
+ State::Next(Name::CodeIndentedAtBreak),
+ State::Next(Name::CodeIndentedAfter),
),
_ => {
tokenizer.enter(Token::CodeFlowChunk);
- State::Retry(StateName::CodeIndentedInside)
+ State::Retry(Name::CodeIndentedInside)
}
}
}
@@ -106,11 +103,11 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\n') => {
tokenizer.exit(Token::CodeFlowChunk);
- State::Retry(StateName::CodeIndentedAtBreak)
+ State::Retry(Name::CodeIndentedAtBreak)
}
_ => {
tokenizer.consume();
- State::Next(StateName::CodeIndentedInside)
+ State::Next(Name::CodeIndentedInside)
}
}
}
@@ -141,14 +138,14 @@ pub fn further_start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::CodeIndentedFurtherStart)
+ State::Next(Name::CodeIndentedFurtherStart)
}
_ if !tokenizer.lazy => {
let name = space_or_tab_min_max(tokenizer, TAB_SIZE, TAB_SIZE);
tokenizer.attempt(
name,
- State::Next(StateName::CodeIndentedFurtherEnd),
- State::Next(StateName::CodeIndentedFurtherBegin),
+ State::Next(Name::CodeIndentedFurtherEnd),
+ State::Next(Name::CodeIndentedFurtherBegin),
)
}
_ => State::Nok,
@@ -177,8 +174,8 @@ pub fn further_begin(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::CodeIndentedFurtherAfter),
- State::Next(StateName::CodeIndentedFurtherAfter),
+ State::Next(Name::CodeIndentedFurtherAfter),
+ State::Next(Name::CodeIndentedFurtherAfter),
)
}
@@ -191,7 +188,7 @@ pub fn further_begin(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn further_after(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- Some(b'\n') => State::Retry(StateName::CodeIndentedFurtherStart),
+ Some(b'\n') => State::Retry(Name::CodeIndentedFurtherStart),
_ => State::Nok,
}
}
diff --git a/src/construct/code_text.rs b/src/construct/code_text.rs
index d7ada3d..f626cfb 100644
--- a/src/construct/code_text.rs
+++ b/src/construct/code_text.rs
@@ -83,8 +83,9 @@
//! [code_fenced]: crate::construct::code_fenced
//! [html-code]: https://html.spec.whatwg.org/multipage/text-level-semantics.html#the-code-element
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of code (text).
///
@@ -105,7 +106,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
{
tokenizer.enter(Token::CodeText);
tokenizer.enter(Token::CodeTextSequence);
- State::Retry(StateName::CodeTextSequenceOpen)
+ State::Retry(Name::CodeTextSequenceOpen)
}
_ => State::Nok,
}
@@ -121,10 +122,10 @@ pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
if let Some(b'`') = tokenizer.current {
tokenizer.tokenize_state.size += 1;
tokenizer.consume();
- State::Next(StateName::CodeTextSequenceOpen)
+ State::Next(Name::CodeTextSequenceOpen)
} else {
tokenizer.exit(Token::CodeTextSequence);
- State::Retry(StateName::CodeTextBetween)
+ State::Retry(Name::CodeTextBetween)
}
}
@@ -144,15 +145,15 @@ pub fn between(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::CodeTextBetween)
+ State::Next(Name::CodeTextBetween)
}
Some(b'`') => {
tokenizer.enter(Token::CodeTextSequence);
- State::Retry(StateName::CodeTextSequenceClose)
+ State::Retry(Name::CodeTextSequenceClose)
}
_ => {
tokenizer.enter(Token::CodeTextData);
- State::Retry(StateName::CodeTextData)
+ State::Retry(Name::CodeTextData)
}
}
}
@@ -167,11 +168,11 @@ pub fn data(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\n' | b'`') => {
tokenizer.exit(Token::CodeTextData);
- State::Retry(StateName::CodeTextBetween)
+ State::Retry(Name::CodeTextBetween)
}
_ => {
tokenizer.consume();
- State::Next(StateName::CodeTextData)
+ State::Next(Name::CodeTextData)
}
}
}
@@ -187,7 +188,7 @@ pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
Some(b'`') => {
tokenizer.tokenize_state.size_b += 1;
tokenizer.consume();
- State::Next(StateName::CodeTextSequenceClose)
+ State::Next(Name::CodeTextSequenceClose)
}
_ => {
if tokenizer.tokenize_state.size == tokenizer.tokenize_state.size_b {
@@ -203,7 +204,7 @@ pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
tokenizer.events[index - 1].token_type = Token::CodeTextData;
tokenizer.events[index].token_type = Token::CodeTextData;
tokenizer.tokenize_state.size_b = 0;
- State::Retry(StateName::CodeTextBetween)
+ State::Retry(Name::CodeTextBetween)
}
}
}
diff --git a/src/construct/definition.rs b/src/construct/definition.rs
index 5db611b..394375f 100644
--- a/src/construct/definition.rs
+++ b/src/construct/definition.rs
@@ -94,8 +94,9 @@
//! [html-img]: https://html.spec.whatwg.org/multipage/embedded-content.html#the-img-element
use crate::construct::partial_space_or_tab::{space_or_tab, space_or_tab_eol};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
use crate::util::skip::opt_back as skip_opt_back;
/// At the start of a definition.
@@ -122,8 +123,8 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::DefinitionBefore),
- State::Next(StateName::DefinitionBefore),
+ State::Next(Name::DefinitionBefore),
+ State::Next(Name::DefinitionBefore),
)
} else {
State::Nok
@@ -143,8 +144,8 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_2 = Token::DefinitionLabelMarker;
tokenizer.tokenize_state.token_3 = Token::DefinitionLabelString;
tokenizer.attempt(
- StateName::LabelStart,
- State::Next(StateName::DefinitionLabelAfter),
+ Name::LabelStart,
+ State::Next(Name::DefinitionLabelAfter),
State::Nok,
)
}
@@ -168,7 +169,7 @@ pub fn label_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::DefinitionMarker);
tokenizer.consume();
tokenizer.exit(Token::DefinitionMarker);
- State::Next(StateName::DefinitionMarkerAfter)
+ State::Next(Name::DefinitionMarkerAfter)
}
_ => State::Nok,
}
@@ -184,8 +185,8 @@ pub fn marker_after(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab_eol(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::DefinitionDestinationBefore),
- State::Next(StateName::DefinitionDestinationBefore),
+ State::Next(Name::DefinitionDestinationBefore),
+ State::Next(Name::DefinitionDestinationBefore),
)
}
@@ -203,9 +204,9 @@ pub fn destination_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_5 = Token::DefinitionDestinationString;
tokenizer.tokenize_state.size_b = usize::MAX;
tokenizer.attempt(
- StateName::DestinationStart,
- State::Next(StateName::DefinitionDestinationAfter),
- State::Next(StateName::DefinitionDestinationMissing),
+ Name::DestinationStart,
+ State::Next(Name::DefinitionDestinationAfter),
+ State::Next(Name::DefinitionDestinationMissing),
)
}
@@ -223,9 +224,9 @@ pub fn destination_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_5 = Token::Data;
tokenizer.tokenize_state.size_b = 0;
tokenizer.attempt(
- StateName::DefinitionTitleBefore,
- State::Next(StateName::DefinitionAfter),
- State::Next(StateName::DefinitionAfter),
+ Name::DefinitionTitleBefore,
+ State::Next(Name::DefinitionAfter),
+ State::Next(Name::DefinitionAfter),
)
}
@@ -252,8 +253,8 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::DefinitionAfterWhitespace),
- State::Next(StateName::DefinitionAfterWhitespace),
+ State::Next(Name::DefinitionAfterWhitespace),
+ State::Next(Name::DefinitionAfterWhitespace),
)
}
@@ -289,7 +290,7 @@ pub fn title_before(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab_eol(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::DefinitionTitleBeforeMarker),
+ State::Next(Name::DefinitionTitleBeforeMarker),
State::Nok,
)
}
@@ -306,8 +307,8 @@ pub fn title_before_marker(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_2 = Token::DefinitionTitleMarker;
tokenizer.tokenize_state.token_3 = Token::DefinitionTitleString;
tokenizer.attempt(
- StateName::TitleStart,
- State::Next(StateName::DefinitionTitleAfter),
+ Name::TitleStart,
+ State::Next(Name::DefinitionTitleAfter),
State::Nok,
)
}
@@ -325,8 +326,8 @@ pub fn title_after(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::DefinitionTitleAfterOptionalWhitespace),
- State::Next(StateName::DefinitionTitleAfterOptionalWhitespace),
+ State::Next(Name::DefinitionTitleAfterOptionalWhitespace),
+ State::Next(Name::DefinitionTitleAfterOptionalWhitespace),
)
}
diff --git a/src/construct/hard_break_escape.rs b/src/construct/hard_break_escape.rs
index fc2cbdf..482648f 100644
--- a/src/construct/hard_break_escape.rs
+++ b/src/construct/hard_break_escape.rs
@@ -39,8 +39,9 @@
//! [hard_break_trailing]: crate::construct::partial_whitespace
//! [html]: https://html.spec.whatwg.org/multipage/text-level-semantics.html#the-br-element
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of a hard break (escape).
///
@@ -54,7 +55,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
Some(b'\\') if tokenizer.parse_state.constructs.hard_break_escape => {
tokenizer.enter(Token::HardBreakEscape);
tokenizer.consume();
- State::Next(StateName::HardBreakEscapeAfter)
+ State::Next(Name::HardBreakEscapeAfter)
}
_ => State::Nok,
}
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index d70f7db..12bc5b1 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -54,10 +54,11 @@
//! [wiki-setext]: https://en.wikipedia.org/wiki/Setext
//! [atx]: http://www.aaronsw.com/2002/atx/
-use super::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
use crate::constant::{HEADING_ATX_OPENING_FENCE_SIZE_MAX, TAB_SIZE};
+use crate::construct::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{ContentType, Event, EventType, State, StateName, Tokenizer};
+use crate::tokenizer::{ContentType, Event, EventType, Tokenizer};
/// Start of a heading (atx).
///
@@ -77,7 +78,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.attempt(name, State::Next(StateName::HeadingAtxBefore), State::Nok)
+ tokenizer.attempt(name, State::Next(Name::HeadingAtxBefore), State::Nok)
} else {
State::Nok
}
@@ -92,7 +93,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
pub fn before(tokenizer: &mut Tokenizer) -> State {
if Some(b'#') == tokenizer.current {
tokenizer.enter(Token::HeadingAtxSequence);
- State::Retry(StateName::HeadingAtxSequenceOpen)
+ State::Retry(Name::HeadingAtxSequenceOpen)
} else {
State::Nok
}
@@ -109,18 +110,18 @@ pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
None | Some(b'\n') if tokenizer.tokenize_state.size > 0 => {
tokenizer.tokenize_state.size = 0;
tokenizer.exit(Token::HeadingAtxSequence);
- State::Retry(StateName::HeadingAtxAtBreak)
+ State::Retry(Name::HeadingAtxAtBreak)
}
Some(b'#') if tokenizer.tokenize_state.size < HEADING_ATX_OPENING_FENCE_SIZE_MAX => {
tokenizer.tokenize_state.size += 1;
tokenizer.consume();
- State::Next(StateName::HeadingAtxSequenceOpen)
+ State::Next(Name::HeadingAtxSequenceOpen)
}
_ if tokenizer.tokenize_state.size > 0 => {
tokenizer.tokenize_state.size = 0;
tokenizer.exit(Token::HeadingAtxSequence);
let name = space_or_tab(tokenizer);
- tokenizer.attempt(name, State::Next(StateName::HeadingAtxAtBreak), State::Nok)
+ tokenizer.attempt(name, State::Next(Name::HeadingAtxAtBreak), State::Nok)
}
_ => {
tokenizer.tokenize_state.size = 0;
@@ -146,15 +147,15 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
}
Some(b'\t' | b' ') => {
let name = space_or_tab(tokenizer);
- tokenizer.attempt(name, State::Next(StateName::HeadingAtxAtBreak), State::Nok)
+ tokenizer.attempt(name, State::Next(Name::HeadingAtxAtBreak), State::Nok)
}
Some(b'#') => {
tokenizer.enter(Token::HeadingAtxSequence);
- State::Retry(StateName::HeadingAtxSequenceFurther)
+ State::Retry(Name::HeadingAtxSequenceFurther)
}
Some(_) => {
tokenizer.enter_with_content(Token::Data, Some(ContentType::Text));
- State::Retry(StateName::HeadingAtxData)
+ State::Retry(Name::HeadingAtxData)
}
}
}
@@ -170,10 +171,10 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
pub fn sequence_further(tokenizer: &mut Tokenizer) -> State {
if let Some(b'#') = tokenizer.current {
tokenizer.consume();
- State::Next(StateName::HeadingAtxSequenceFurther)
+ State::Next(Name::HeadingAtxSequenceFurther)
} else {
tokenizer.exit(Token::HeadingAtxSequence);
- State::Retry(StateName::HeadingAtxAtBreak)
+ State::Retry(Name::HeadingAtxAtBreak)
}
}
@@ -188,11 +189,11 @@ pub fn data(tokenizer: &mut Tokenizer) -> State {
// Note: `#` for closing sequence must be preceded by whitespace, otherwise it’s just text.
None | Some(b'\t' | b'\n' | b' ') => {
tokenizer.exit(Token::Data);
- State::Retry(StateName::HeadingAtxAtBreak)
+ State::Retry(Name::HeadingAtxAtBreak)
}
_ => {
tokenizer.consume();
- State::Next(StateName::HeadingAtxData)
+ State::Next(Name::HeadingAtxData)
}
}
}
diff --git a/src/construct/heading_setext.rs b/src/construct/heading_setext.rs
index f653d75..8b45fff 100644
--- a/src/construct/heading_setext.rs
+++ b/src/construct/heading_setext.rs
@@ -59,8 +59,9 @@
use crate::constant::TAB_SIZE;
use crate::construct::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{EventType, State, StateName, Tokenizer};
+use crate::tokenizer::{EventType, Tokenizer};
use crate::util::skip::opt_back as skip_opt_back;
/// At a line ending, presumably an underline.
@@ -93,11 +94,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
},
);
- tokenizer.attempt(
- name,
- State::Next(StateName::HeadingSetextBefore),
- State::Nok,
- )
+ tokenizer.attempt(name, State::Next(Name::HeadingSetextBefore), State::Nok)
} else {
State::Nok
}
@@ -115,7 +112,7 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
Some(b'-' | b'=') => {
tokenizer.tokenize_state.marker = tokenizer.current.unwrap();
tokenizer.enter(Token::HeadingSetextUnderline);
- State::Retry(StateName::HeadingSetextInside)
+ State::Retry(Name::HeadingSetextInside)
}
_ => State::Nok,
}
@@ -132,7 +129,7 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'-' | b'=') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker => {
tokenizer.consume();
- State::Next(StateName::HeadingSetextInside)
+ State::Next(Name::HeadingSetextInside)
}
_ => {
tokenizer.tokenize_state.marker = 0;
@@ -140,8 +137,8 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::HeadingSetextAfter),
- State::Next(StateName::HeadingSetextAfter),
+ State::Next(Name::HeadingSetextAfter),
+ State::Next(Name::HeadingSetextAfter),
)
}
}
diff --git a/src/construct/html_flow.rs b/src/construct/html_flow.rs
index 7a346e9..c9f8610 100644
--- a/src/construct/html_flow.rs
+++ b/src/construct/html_flow.rs
@@ -104,8 +104,9 @@ use crate::constant::{
use crate::construct::partial_space_or_tab::{
space_or_tab_with_options, Options as SpaceOrTabOptions,
};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
use crate::util::slice::Slice;
/// Symbol for `<script>` (condition 1).
@@ -147,7 +148,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
},
);
- tokenizer.attempt(name, State::Next(StateName::HtmlFlowBefore), State::Nok)
+ tokenizer.attempt(name, State::Next(Name::HtmlFlowBefore), State::Nok)
} else {
State::Nok
}
@@ -163,7 +164,7 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
if Some(b'<') == tokenizer.current {
tokenizer.enter(Token::HtmlFlowData);
tokenizer.consume();
- State::Next(StateName::HtmlFlowOpen)
+ State::Next(Name::HtmlFlowOpen)
} else {
State::Nok
}
@@ -183,13 +184,13 @@ pub fn open(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'!') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowDeclarationOpen)
+ State::Next(Name::HtmlFlowDeclarationOpen)
}
Some(b'/') => {
tokenizer.consume();
tokenizer.tokenize_state.seen = true;
tokenizer.tokenize_state.start = tokenizer.point.index;
- State::Next(StateName::HtmlFlowTagCloseStart)
+ State::Next(Name::HtmlFlowTagCloseStart)
}
Some(b'?') => {
tokenizer.tokenize_state.marker = INSTRUCTION;
@@ -198,12 +199,12 @@ pub fn open(tokenizer: &mut Tokenizer) -> State {
tokenizer.concrete = true;
// While we’re in an instruction instead of a declaration, we’re on a `?`
// right now, so we do need to search for `>`, similar to declarations.
- State::Next(StateName::HtmlFlowContinuationDeclarationInside)
+ State::Next(Name::HtmlFlowContinuationDeclarationInside)
}
// ASCII alphabetical.
Some(b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.tokenize_state.start = tokenizer.point.index;
- State::Retry(StateName::HtmlFlowTagName)
+ State::Retry(Name::HtmlFlowTagName)
}
_ => State::Nok,
}
@@ -224,19 +225,19 @@ pub fn declaration_open(tokenizer: &mut Tokenizer) -> State {
Some(b'-') => {
tokenizer.consume();
tokenizer.tokenize_state.marker = COMMENT;
- State::Next(StateName::HtmlFlowCommentOpenInside)
+ State::Next(Name::HtmlFlowCommentOpenInside)
}
Some(b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.consume();
tokenizer.tokenize_state.marker = DECLARATION;
// Do not form containers.
tokenizer.concrete = true;
- State::Next(StateName::HtmlFlowContinuationDeclarationInside)
+ State::Next(Name::HtmlFlowContinuationDeclarationInside)
}
Some(b'[') => {
tokenizer.consume();
tokenizer.tokenize_state.marker = CDATA;
- State::Next(StateName::HtmlFlowCdataOpenInside)
+ State::Next(Name::HtmlFlowCdataOpenInside)
}
_ => State::Nok,
}
@@ -253,7 +254,7 @@ pub fn comment_open_inside(tokenizer: &mut Tokenizer) -> State {
tokenizer.consume();
// Do not form containers.
tokenizer.concrete = true;
- State::Next(StateName::HtmlFlowContinuationDeclarationInside)
+ State::Next(Name::HtmlFlowContinuationDeclarationInside)
} else {
tokenizer.tokenize_state.marker = 0;
State::Nok
@@ -275,9 +276,9 @@ pub fn cdata_open_inside(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size = 0;
// Do not form containers.
tokenizer.concrete = true;
- State::Next(StateName::HtmlFlowContinuation)
+ State::Next(Name::HtmlFlowContinuation)
} else {
- State::Next(StateName::HtmlFlowCdataOpenInside)
+ State::Next(Name::HtmlFlowCdataOpenInside)
}
} else {
tokenizer.tokenize_state.marker = 0;
@@ -295,7 +296,7 @@ pub fn cdata_open_inside(tokenizer: &mut Tokenizer) -> State {
pub fn tag_close_start(tokenizer: &mut Tokenizer) -> State {
if let Some(b'A'..=b'Z' | b'a'..=b'z') = tokenizer.current {
tokenizer.consume();
- State::Next(StateName::HtmlFlowTagName)
+ State::Next(Name::HtmlFlowTagName)
} else {
tokenizer.tokenize_state.seen = false;
tokenizer.tokenize_state.start = 0;
@@ -334,17 +335,17 @@ pub fn tag_name(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.marker = RAW;
// Do not form containers.
tokenizer.concrete = true;
- State::Retry(StateName::HtmlFlowContinuation)
+ State::Retry(Name::HtmlFlowContinuation)
} else if HTML_BLOCK_NAMES.contains(&name.as_str()) {
tokenizer.tokenize_state.marker = BASIC;
if slash {
tokenizer.consume();
- State::Next(StateName::HtmlFlowBasicSelfClosing)
+ State::Next(Name::HtmlFlowBasicSelfClosing)
} else {
// Do not form containers.
tokenizer.concrete = true;
- State::Retry(StateName::HtmlFlowContinuation)
+ State::Retry(Name::HtmlFlowContinuation)
}
} else {
tokenizer.tokenize_state.marker = COMPLETE;
@@ -354,16 +355,16 @@ pub fn tag_name(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.marker = 0;
State::Nok
} else if closing_tag {
- State::Retry(StateName::HtmlFlowCompleteClosingTagAfter)
+ State::Retry(Name::HtmlFlowCompleteClosingTagAfter)
} else {
- State::Retry(StateName::HtmlFlowCompleteAttributeNameBefore)
+ State::Retry(Name::HtmlFlowCompleteAttributeNameBefore)
}
}
}
// ASCII alphanumerical and `-`.
Some(b'-' | b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowTagName)
+ State::Next(Name::HtmlFlowTagName)
}
Some(_) => {
tokenizer.tokenize_state.seen = false;
@@ -383,7 +384,7 @@ pub fn basic_self_closing(tokenizer: &mut Tokenizer) -> State {
tokenizer.consume();
// Do not form containers.
tokenizer.concrete = true;
- State::Next(StateName::HtmlFlowContinuation)
+ State::Next(Name::HtmlFlowContinuation)
} else {
tokenizer.tokenize_state.marker = 0;
State::Nok
@@ -400,9 +401,9 @@ pub fn complete_closing_tag_after(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteClosingTagAfter)
+ State::Next(Name::HtmlFlowCompleteClosingTagAfter)
}
- _ => State::Retry(StateName::HtmlFlowCompleteEnd),
+ _ => State::Retry(Name::HtmlFlowCompleteEnd),
}
}
@@ -429,18 +430,18 @@ pub fn complete_attribute_name_before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeNameBefore)
+ State::Next(Name::HtmlFlowCompleteAttributeNameBefore)
}
Some(b'/') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteEnd)
+ State::Next(Name::HtmlFlowCompleteEnd)
}
// ASCII alphanumerical and `:` and `_`.
Some(b'0'..=b'9' | b':' | b'A'..=b'Z' | b'_' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeName)
+ State::Next(Name::HtmlFlowCompleteAttributeName)
}
- _ => State::Retry(StateName::HtmlFlowCompleteEnd),
+ _ => State::Retry(Name::HtmlFlowCompleteEnd),
}
}
@@ -459,9 +460,9 @@ pub fn complete_attribute_name(tokenizer: &mut Tokenizer) -> State {
// ASCII alphanumerical and `-`, `.`, `:`, and `_`.
Some(b'-' | b'.' | b'0'..=b'9' | b':' | b'A'..=b'Z' | b'_' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeName)
+ State::Next(Name::HtmlFlowCompleteAttributeName)
}
- _ => State::Retry(StateName::HtmlFlowCompleteAttributeNameAfter),
+ _ => State::Retry(Name::HtmlFlowCompleteAttributeNameAfter),
}
}
@@ -478,13 +479,13 @@ pub fn complete_attribute_name_after(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeNameAfter)
+ State::Next(Name::HtmlFlowCompleteAttributeNameAfter)
}
Some(b'=') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeValueBefore)
+ State::Next(Name::HtmlFlowCompleteAttributeValueBefore)
}
- _ => State::Retry(StateName::HtmlFlowCompleteAttributeNameBefore),
+ _ => State::Retry(Name::HtmlFlowCompleteAttributeNameBefore),
}
}
@@ -505,14 +506,14 @@ pub fn complete_attribute_value_before(tokenizer: &mut Tokenizer) -> State {
}
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeValueBefore)
+ State::Next(Name::HtmlFlowCompleteAttributeValueBefore)
}
Some(b'"' | b'\'') => {
tokenizer.tokenize_state.marker_b = tokenizer.current.unwrap();
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeValueQuoted)
+ State::Next(Name::HtmlFlowCompleteAttributeValueQuoted)
}
- _ => State::Retry(StateName::HtmlFlowCompleteAttributeValueUnquoted),
+ _ => State::Retry(Name::HtmlFlowCompleteAttributeValueUnquoted),
}
}
@@ -534,11 +535,11 @@ pub fn complete_attribute_value_quoted(tokenizer: &mut Tokenizer) -> State {
Some(b'"' | b'\'') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker_b => {
tokenizer.tokenize_state.marker_b = 0;
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeValueQuotedAfter)
+ State::Next(Name::HtmlFlowCompleteAttributeValueQuotedAfter)
}
_ => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeValueQuoted)
+ State::Next(Name::HtmlFlowCompleteAttributeValueQuoted)
}
}
}
@@ -552,11 +553,11 @@ pub fn complete_attribute_value_quoted(tokenizer: &mut Tokenizer) -> State {
pub fn complete_attribute_value_unquoted(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\t' | b'\n' | b' ' | b'"' | b'\'' | b'/' | b'<' | b'=' | b'>' | b'`') => {
- State::Retry(StateName::HtmlFlowCompleteAttributeNameAfter)
+ State::Retry(Name::HtmlFlowCompleteAttributeNameAfter)
}
Some(_) => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAttributeValueUnquoted)
+ State::Next(Name::HtmlFlowCompleteAttributeValueUnquoted)
}
}
}
@@ -570,7 +571,7 @@ pub fn complete_attribute_value_unquoted(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn complete_attribute_value_quoted_after(tokenizer: &mut Tokenizer) -> State {
if let Some(b'\t' | b' ' | b'/' | b'>') = tokenizer.current {
- State::Retry(StateName::HtmlFlowCompleteAttributeNameBefore)
+ State::Retry(Name::HtmlFlowCompleteAttributeNameBefore)
} else {
tokenizer.tokenize_state.marker = 0;
State::Nok
@@ -586,7 +587,7 @@ pub fn complete_attribute_value_quoted_after(tokenizer: &mut Tokenizer) -> State
pub fn complete_end(tokenizer: &mut Tokenizer) -> State {
if let Some(b'>') = tokenizer.current {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAfter)
+ State::Next(Name::HtmlFlowCompleteAfter)
} else {
tokenizer.tokenize_state.marker = 0;
State::Nok
@@ -604,11 +605,11 @@ pub fn complete_after(tokenizer: &mut Tokenizer) -> State {
None | Some(b'\n') => {
// Do not form containers.
tokenizer.concrete = true;
- State::Retry(StateName::HtmlFlowContinuation)
+ State::Retry(Name::HtmlFlowContinuation)
}
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowCompleteAfter)
+ State::Next(Name::HtmlFlowCompleteAfter)
}
Some(_) => {
tokenizer.tokenize_state.marker = 0;
@@ -631,39 +632,39 @@ pub fn continuation(tokenizer: &mut Tokenizer) -> State {
{
tokenizer.exit(Token::HtmlFlowData);
tokenizer.check(
- StateName::HtmlFlowBlankLineBefore,
- State::Next(StateName::HtmlFlowContinuationAfter),
- State::Next(StateName::HtmlFlowContinuationStart),
+ Name::HtmlFlowBlankLineBefore,
+ State::Next(Name::HtmlFlowContinuationAfter),
+ State::Next(Name::HtmlFlowContinuationStart),
)
}
// Note: important that this is after the basic/complete case.
None | Some(b'\n') => {
tokenizer.exit(Token::HtmlFlowData);
- State::Retry(StateName::HtmlFlowContinuationStart)
+ State::Retry(Name::HtmlFlowContinuationStart)
}
Some(b'-') if tokenizer.tokenize_state.marker == COMMENT => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationCommentInside)
+ State::Next(Name::HtmlFlowContinuationCommentInside)
}
Some(b'<') if tokenizer.tokenize_state.marker == RAW => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationRawTagOpen)
+ State::Next(Name::HtmlFlowContinuationRawTagOpen)
}
Some(b'>') if tokenizer.tokenize_state.marker == DECLARATION => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationClose)
+ State::Next(Name::HtmlFlowContinuationClose)
}
Some(b'?') if tokenizer.tokenize_state.marker == INSTRUCTION => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationDeclarationInside)
+ State::Next(Name::HtmlFlowContinuationDeclarationInside)
}
Some(b']') if tokenizer.tokenize_state.marker == CDATA => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationCdataInside)
+ State::Next(Name::HtmlFlowContinuationCdataInside)
}
_ => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuation)
+ State::Next(Name::HtmlFlowContinuation)
}
}
}
@@ -677,9 +678,9 @@ pub fn continuation(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn continuation_start(tokenizer: &mut Tokenizer) -> State {
tokenizer.check(
- StateName::NonLazyContinuationStart,
- State::Next(StateName::HtmlFlowContinuationStartNonLazy),
- State::Next(StateName::HtmlFlowContinuationAfter),
+ Name::NonLazyContinuationStart,
+ State::Next(Name::HtmlFlowContinuationStartNonLazy),
+ State::Next(Name::HtmlFlowContinuationAfter),
)
}
@@ -696,7 +697,7 @@ pub fn continuation_start_non_lazy(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::HtmlFlowContinuationBefore)
+ State::Next(Name::HtmlFlowContinuationBefore)
}
_ => unreachable!("expected eol"),
}
@@ -711,10 +712,10 @@ pub fn continuation_start_non_lazy(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn continuation_before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- None | Some(b'\n') => State::Retry(StateName::HtmlFlowContinuationStart),
+ None | Some(b'\n') => State::Retry(Name::HtmlFlowContinuationStart),
_ => {
tokenizer.enter(Token::HtmlFlowData);
- State::Retry(StateName::HtmlFlowContinuation)
+ State::Retry(Name::HtmlFlowContinuation)
}
}
}
@@ -729,9 +730,9 @@ pub fn continuation_comment_inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'-') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationDeclarationInside)
+ State::Next(Name::HtmlFlowContinuationDeclarationInside)
}
- _ => State::Retry(StateName::HtmlFlowContinuation),
+ _ => State::Retry(Name::HtmlFlowContinuation),
}
}
@@ -746,9 +747,9 @@ pub fn continuation_raw_tag_open(tokenizer: &mut Tokenizer) -> State {
Some(b'/') => {
tokenizer.consume();
tokenizer.tokenize_state.start = tokenizer.point.index;
- State::Next(StateName::HtmlFlowContinuationRawEndTag)
+ State::Next(Name::HtmlFlowContinuationRawEndTag)
}
- _ => State::Retry(StateName::HtmlFlowContinuation),
+ _ => State::Retry(Name::HtmlFlowContinuation),
}
}
@@ -773,20 +774,20 @@ pub fn continuation_raw_end_tag(tokenizer: &mut Tokenizer) -> State {
if HTML_RAW_NAMES.contains(&name.as_str()) {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationClose)
+ State::Next(Name::HtmlFlowContinuationClose)
} else {
- State::Retry(StateName::HtmlFlowContinuation)
+ State::Retry(Name::HtmlFlowContinuation)
}
}
Some(b'A'..=b'Z' | b'a'..=b'z')
if tokenizer.point.index - tokenizer.tokenize_state.start < HTML_RAW_SIZE_MAX =>
{
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationRawEndTag)
+ State::Next(Name::HtmlFlowContinuationRawEndTag)
}
_ => {
tokenizer.tokenize_state.start = 0;
- State::Retry(StateName::HtmlFlowContinuation)
+ State::Retry(Name::HtmlFlowContinuation)
}
}
}
@@ -801,9 +802,9 @@ pub fn continuation_cdata_inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b']') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationDeclarationInside)
+ State::Next(Name::HtmlFlowContinuationDeclarationInside)
}
- _ => State::Retry(StateName::HtmlFlowContinuation),
+ _ => State::Retry(Name::HtmlFlowContinuation),
}
}
@@ -825,13 +826,13 @@ pub fn continuation_declaration_inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'>') => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationClose)
+ State::Next(Name::HtmlFlowContinuationClose)
}
Some(b'-') if tokenizer.tokenize_state.marker == COMMENT => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationDeclarationInside)
+ State::Next(Name::HtmlFlowContinuationDeclarationInside)
}
- _ => State::Retry(StateName::HtmlFlowContinuation),
+ _ => State::Retry(Name::HtmlFlowContinuation),
}
}
@@ -845,11 +846,11 @@ pub fn continuation_close(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\n') => {
tokenizer.exit(Token::HtmlFlowData);
- State::Retry(StateName::HtmlFlowContinuationAfter)
+ State::Retry(Name::HtmlFlowContinuationAfter)
}
_ => {
tokenizer.consume();
- State::Next(StateName::HtmlFlowContinuationClose)
+ State::Next(Name::HtmlFlowContinuationClose)
}
}
}
@@ -881,5 +882,5 @@ pub fn blank_line_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::BlankLineStart)
+ State::Next(Name::BlankLineStart)
}
diff --git a/src/construct/html_text.rs b/src/construct/html_text.rs
index 7474dbf..dd14e70 100644
--- a/src/construct/html_text.rs
+++ b/src/construct/html_text.rs
@@ -56,8 +56,9 @@
use crate::constant::HTML_CDATA_PREFIX;
use crate::construct::partial_space_or_tab::space_or_tab;
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of HTML (text)
///
@@ -70,7 +71,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::HtmlText);
tokenizer.enter(Token::HtmlTextData);
tokenizer.consume();
- State::Next(StateName::HtmlTextOpen)
+ State::Next(Name::HtmlTextOpen)
} else {
State::Nok
}
@@ -90,20 +91,20 @@ pub fn open(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'!') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextDeclarationOpen)
+ State::Next(Name::HtmlTextDeclarationOpen)
}
Some(b'/') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagCloseStart)
+ State::Next(Name::HtmlTextTagCloseStart)
}
Some(b'?') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextInstruction)
+ State::Next(Name::HtmlTextInstruction)
}
// ASCII alphabetical.
Some(b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpen)
+ State::Next(Name::HtmlTextTagOpen)
}
_ => State::Nok,
}
@@ -123,16 +124,16 @@ pub fn declaration_open(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'-') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextCommentOpenInside)
+ State::Next(Name::HtmlTextCommentOpenInside)
}
// ASCII alphabetical.
Some(b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextDeclaration)
+ State::Next(Name::HtmlTextDeclaration)
}
Some(b'[') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextCdataOpenInside)
+ State::Next(Name::HtmlTextCdataOpenInside)
}
_ => State::Nok,
}
@@ -148,7 +149,7 @@ pub fn comment_open_inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'-') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextCommentStart)
+ State::Next(Name::HtmlTextCommentStart)
}
_ => State::Nok,
}
@@ -172,9 +173,9 @@ pub fn comment_start(tokenizer: &mut Tokenizer) -> State {
Some(b'>') => State::Nok,
Some(b'-') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextCommentStartDash)
+ State::Next(Name::HtmlTextCommentStartDash)
}
- _ => State::Retry(StateName::HtmlTextComment),
+ _ => State::Retry(Name::HtmlTextComment),
}
}
@@ -194,7 +195,7 @@ pub fn comment_start(tokenizer: &mut Tokenizer) -> State {
pub fn comment_start_dash(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'>') => State::Nok,
- _ => State::Retry(StateName::HtmlTextComment),
+ _ => State::Retry(Name::HtmlTextComment),
}
}
@@ -208,17 +209,17 @@ pub fn comment(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Nok,
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextComment),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextComment),
State::Nok,
),
Some(b'-') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextCommentClose)
+ State::Next(Name::HtmlTextCommentClose)
}
_ => {
tokenizer.consume();
- State::Next(StateName::HtmlTextComment)
+ State::Next(Name::HtmlTextComment)
}
}
}
@@ -233,9 +234,9 @@ pub fn comment_close(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'-') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextEnd)
+ State::Next(Name::HtmlTextEnd)
}
- _ => State::Retry(StateName::HtmlTextComment),
+ _ => State::Retry(Name::HtmlTextComment),
}
}
@@ -252,9 +253,9 @@ pub fn cdata_open_inside(tokenizer: &mut Tokenizer) -> State {
if tokenizer.tokenize_state.size == HTML_CDATA_PREFIX.len() {
tokenizer.tokenize_state.size = 0;
- State::Next(StateName::HtmlTextCdata)
+ State::Next(Name::HtmlTextCdata)
} else {
- State::Next(StateName::HtmlTextCdataOpenInside)
+ State::Next(Name::HtmlTextCdataOpenInside)
}
} else {
State::Nok
@@ -271,17 +272,17 @@ pub fn cdata(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Nok,
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextCdata),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextCdata),
State::Nok,
),
Some(b']') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextCdataClose)
+ State::Next(Name::HtmlTextCdataClose)
}
_ => {
tokenizer.consume();
- State::Next(StateName::HtmlTextCdata)
+ State::Next(Name::HtmlTextCdata)
}
}
}
@@ -296,9 +297,9 @@ pub fn cdata_close(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b']') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextCdataEnd)
+ State::Next(Name::HtmlTextCdataEnd)
}
- _ => State::Retry(StateName::HtmlTextCdata),
+ _ => State::Retry(Name::HtmlTextCdata),
}
}
@@ -310,9 +311,9 @@ pub fn cdata_close(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn cdata_end(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- Some(b'>') => State::Retry(StateName::HtmlTextEnd),
- Some(b']') => State::Retry(StateName::HtmlTextCdataClose),
- _ => State::Retry(StateName::HtmlTextCdata),
+ Some(b'>') => State::Retry(Name::HtmlTextEnd),
+ Some(b']') => State::Retry(Name::HtmlTextCdataClose),
+ _ => State::Retry(Name::HtmlTextCdata),
}
}
@@ -324,15 +325,15 @@ pub fn cdata_end(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn declaration(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- None | Some(b'>') => State::Retry(StateName::HtmlTextEnd),
+ None | Some(b'>') => State::Retry(Name::HtmlTextEnd),
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextDeclaration),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextDeclaration),
State::Nok,
),
_ => {
tokenizer.consume();
- State::Next(StateName::HtmlTextDeclaration)
+ State::Next(Name::HtmlTextDeclaration)
}
}
}
@@ -347,17 +348,17 @@ pub fn instruction(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Nok,
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextInstruction),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextInstruction),
State::Nok,
),
Some(b'?') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextInstructionClose)
+ State::Next(Name::HtmlTextInstructionClose)
}
_ => {
tokenizer.consume();
- State::Next(StateName::HtmlTextInstruction)
+ State::Next(Name::HtmlTextInstruction)
}
}
}
@@ -370,8 +371,8 @@ pub fn instruction(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn instruction_close(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- Some(b'>') => State::Retry(StateName::HtmlTextEnd),
- _ => State::Retry(StateName::HtmlTextInstruction),
+ Some(b'>') => State::Retry(Name::HtmlTextEnd),
+ _ => State::Retry(Name::HtmlTextInstruction),
}
}
@@ -386,7 +387,7 @@ pub fn tag_close_start(tokenizer: &mut Tokenizer) -> State {
// ASCII alphabetical.
Some(b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagClose)
+ State::Next(Name::HtmlTextTagClose)
}
_ => State::Nok,
}
@@ -403,9 +404,9 @@ pub fn tag_close(tokenizer: &mut Tokenizer) -> State {
// ASCII alphanumerical and `-`.
Some(b'-' | b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagClose)
+ State::Next(Name::HtmlTextTagClose)
}
- _ => State::Retry(StateName::HtmlTextTagCloseBetween),
+ _ => State::Retry(Name::HtmlTextTagCloseBetween),
}
}
@@ -418,15 +419,15 @@ pub fn tag_close(tokenizer: &mut Tokenizer) -> State {
pub fn tag_close_between(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextTagCloseBetween),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextTagCloseBetween),
State::Nok,
),
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagCloseBetween)
+ State::Next(Name::HtmlTextTagCloseBetween)
}
- _ => State::Retry(StateName::HtmlTextEnd),
+ _ => State::Retry(Name::HtmlTextEnd),
}
}
@@ -441,9 +442,9 @@ pub fn tag_open(tokenizer: &mut Tokenizer) -> State {
// ASCII alphanumerical and `-`.
Some(b'-' | b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpen)
+ State::Next(Name::HtmlTextTagOpen)
}
- Some(b'\t' | b'\n' | b' ' | b'/' | b'>') => State::Retry(StateName::HtmlTextTagOpenBetween),
+ Some(b'\t' | b'\n' | b' ' | b'/' | b'>') => State::Retry(Name::HtmlTextTagOpenBetween),
_ => State::Nok,
}
}
@@ -457,24 +458,24 @@ pub fn tag_open(tokenizer: &mut Tokenizer) -> State {
pub fn tag_open_between(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextTagOpenBetween),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextTagOpenBetween),
State::Nok,
),
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenBetween)
+ State::Next(Name::HtmlTextTagOpenBetween)
}
Some(b'/') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextEnd)
+ State::Next(Name::HtmlTextEnd)
}
// ASCII alphabetical and `:` and `_`.
Some(b':' | b'A'..=b'Z' | b'_' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeName)
+ State::Next(Name::HtmlTextTagOpenAttributeName)
}
- _ => State::Retry(StateName::HtmlTextEnd),
+ _ => State::Retry(Name::HtmlTextEnd),
}
}
@@ -489,9 +490,9 @@ pub fn tag_open_attribute_name(tokenizer: &mut Tokenizer) -> State {
// ASCII alphanumerical and `-`, `.`, `:`, and `_`.
Some(b'-' | b'.' | b'0'..=b'9' | b':' | b'A'..=b'Z' | b'_' | b'a'..=b'z') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeName)
+ State::Next(Name::HtmlTextTagOpenAttributeName)
}
- _ => State::Retry(StateName::HtmlTextTagOpenAttributeNameAfter),
+ _ => State::Retry(Name::HtmlTextTagOpenAttributeNameAfter),
}
}
@@ -505,19 +506,19 @@ pub fn tag_open_attribute_name(tokenizer: &mut Tokenizer) -> State {
pub fn tag_open_attribute_name_after(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextTagOpenAttributeNameAfter),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextTagOpenAttributeNameAfter),
State::Nok,
),
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeNameAfter)
+ State::Next(Name::HtmlTextTagOpenAttributeNameAfter)
}
Some(b'=') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeValueBefore)
+ State::Next(Name::HtmlTextTagOpenAttributeValueBefore)
}
- _ => State::Retry(StateName::HtmlTextTagOpenBetween),
+ _ => State::Retry(Name::HtmlTextTagOpenBetween),
}
}
@@ -532,22 +533,22 @@ pub fn tag_open_attribute_value_before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'<' | b'=' | b'>' | b'`') => State::Nok,
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextTagOpenAttributeValueBefore),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextTagOpenAttributeValueBefore),
State::Nok,
),
Some(b'\t' | b' ') => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeValueBefore)
+ State::Next(Name::HtmlTextTagOpenAttributeValueBefore)
}
Some(b'"' | b'\'') => {
tokenizer.tokenize_state.marker = tokenizer.current.unwrap();
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeValueQuoted)
+ State::Next(Name::HtmlTextTagOpenAttributeValueQuoted)
}
Some(_) => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeValueUnquoted)
+ State::Next(Name::HtmlTextTagOpenAttributeValueUnquoted)
}
}
}
@@ -565,18 +566,18 @@ pub fn tag_open_attribute_value_quoted(tokenizer: &mut Tokenizer) -> State {
State::Nok
}
Some(b'\n') => tokenizer.attempt(
- StateName::HtmlTextLineEndingBefore,
- State::Next(StateName::HtmlTextTagOpenAttributeValueQuoted),
+ Name::HtmlTextLineEndingBefore,
+ State::Next(Name::HtmlTextTagOpenAttributeValueQuoted),
State::Nok,
),
Some(b'"' | b'\'') if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker => {
tokenizer.tokenize_state.marker = 0;
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeValueQuotedAfter)
+ State::Next(Name::HtmlTextTagOpenAttributeValueQuotedAfter)
}
_ => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeValueQuoted)
+ State::Next(Name::HtmlTextTagOpenAttributeValueQuoted)
}
}
}
@@ -590,10 +591,10 @@ pub fn tag_open_attribute_value_quoted(tokenizer: &mut Tokenizer) -> State {
pub fn tag_open_attribute_value_unquoted(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'"' | b'\'' | b'<' | b'=' | b'`') => State::Nok,
- Some(b'\t' | b'\n' | b' ' | b'/' | b'>') => State::Retry(StateName::HtmlTextTagOpenBetween),
+ Some(b'\t' | b'\n' | b' ' | b'/' | b'>') => State::Retry(Name::HtmlTextTagOpenBetween),
Some(_) => {
tokenizer.consume();
- State::Next(StateName::HtmlTextTagOpenAttributeValueUnquoted)
+ State::Next(Name::HtmlTextTagOpenAttributeValueUnquoted)
}
}
}
@@ -607,7 +608,7 @@ pub fn tag_open_attribute_value_unquoted(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn tag_open_attribute_value_quoted_after(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
- Some(b'\t' | b'\n' | b' ' | b'>' | b'/') => State::Retry(StateName::HtmlTextTagOpenBetween),
+ Some(b'\t' | b'\n' | b' ' | b'>' | b'/') => State::Retry(Name::HtmlTextTagOpenBetween),
_ => State::Nok,
}
}
@@ -647,7 +648,7 @@ pub fn line_ending_before(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::HtmlTextLineEndingAfter)
+ State::Next(Name::HtmlTextLineEndingAfter)
}
_ => unreachable!("expected eol"),
}
@@ -667,8 +668,8 @@ pub fn line_ending_after(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::HtmlTextLineEndingAfterPrefix),
- State::Next(StateName::HtmlTextLineEndingAfterPrefix),
+ State::Next(Name::HtmlTextLineEndingAfterPrefix),
+ State::Next(Name::HtmlTextLineEndingAfterPrefix),
)
}
diff --git a/src/construct/label_end.rs b/src/construct/label_end.rs
index a25f917..0607077 100644
--- a/src/construct/label_end.rs
+++ b/src/construct/label_end.rs
@@ -148,8 +148,9 @@
use crate::constant::RESOURCE_DESTINATION_BALANCE_MAX;
use crate::construct::partial_space_or_tab::space_or_tab_eol;
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{Event, EventType, Media, State, StateName, Tokenizer};
+use crate::tokenizer::{Event, EventType, Media, Tokenizer};
use crate::util::{
normalize_identifier::normalize_identifier,
skip,
@@ -194,7 +195,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
// Mark as balanced if the info is inactive.
if label_start.inactive {
- return State::Retry(StateName::LabelEndNok);
+ return State::Retry(Name::LabelEndNok);
}
tokenizer.enter(Token::LabelEnd);
@@ -202,7 +203,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.consume();
tokenizer.exit(Token::LabelMarker);
tokenizer.exit(Token::LabelEnd);
- return State::Next(StateName::LabelEndAfter);
+ return State::Next(Name::LabelEndAfter);
}
}
@@ -239,29 +240,29 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
// Resource (`[asd](fgh)`)?
Some(b'(') => tokenizer.attempt(
- StateName::LabelEndResourceStart,
- State::Next(StateName::LabelEndOk),
+ Name::LabelEndResourceStart,
+ State::Next(Name::LabelEndOk),
State::Next(if defined {
- StateName::LabelEndOk
+ Name::LabelEndOk
} else {
- StateName::LabelEndNok
+ Name::LabelEndNok
}),
),
// Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?
Some(b'[') => tokenizer.attempt(
- StateName::LabelEndReferenceFull,
- State::Next(StateName::LabelEndOk),
+ Name::LabelEndReferenceFull,
+ State::Next(Name::LabelEndOk),
State::Next(if defined {
- StateName::LabelEndReferenceNotFull
+ Name::LabelEndReferenceNotFull
} else {
- StateName::LabelEndNok
+ Name::LabelEndNok
}),
),
// Shortcut (`[asd]`) reference?
_ => State::Retry(if defined {
- StateName::LabelEndOk
+ Name::LabelEndOk
} else {
- StateName::LabelEndNok
+ Name::LabelEndNok
}),
}
}
@@ -278,9 +279,9 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn reference_not_full(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
- StateName::LabelEndReferenceCollapsed,
- State::Next(StateName::LabelEndOk),
- State::Next(StateName::LabelEndNok),
+ Name::LabelEndReferenceCollapsed,
+ State::Next(Name::LabelEndOk),
+ State::Next(Name::LabelEndNok),
)
}
@@ -370,7 +371,7 @@ pub fn resource_start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::ResourceMarker);
tokenizer.consume();
tokenizer.exit(Token::ResourceMarker);
- State::Next(StateName::LabelEndResourceBefore)
+ State::Next(Name::LabelEndResourceBefore)
}
_ => unreachable!("expected `(`"),
}
@@ -386,8 +387,8 @@ pub fn resource_before(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab_eol(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::LabelEndResourceOpen),
- State::Next(StateName::LabelEndResourceOpen),
+ State::Next(Name::LabelEndResourceOpen),
+ State::Next(Name::LabelEndResourceOpen),
)
}
@@ -399,7 +400,7 @@ pub fn resource_before(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn resource_open(tokenizer: &mut Tokenizer) -> State {
if let Some(b')') = tokenizer.current {
- State::Retry(StateName::LabelEndResourceEnd)
+ State::Retry(Name::LabelEndResourceEnd)
} else {
tokenizer.tokenize_state.token_1 = Token::ResourceDestination;
tokenizer.tokenize_state.token_2 = Token::ResourceDestinationLiteral;
@@ -409,9 +410,9 @@ pub fn resource_open(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size_b = RESOURCE_DESTINATION_BALANCE_MAX;
tokenizer.attempt(
- StateName::DestinationStart,
- State::Next(StateName::LabelEndResourceDestinationAfter),
- State::Next(StateName::LabelEndResourceDestinationMissing),
+ Name::DestinationStart,
+ State::Next(Name::LabelEndResourceDestinationAfter),
+ State::Next(Name::LabelEndResourceDestinationMissing),
)
}
}
@@ -432,8 +433,8 @@ pub fn resource_destination_after(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab_eol(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::LabelEndResourceBetween),
- State::Next(StateName::LabelEndResourceEnd),
+ State::Next(Name::LabelEndResourceBetween),
+ State::Next(Name::LabelEndResourceEnd),
)
}
@@ -461,12 +462,12 @@ pub fn resource_between(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_2 = Token::ResourceTitleMarker;
tokenizer.tokenize_state.token_3 = Token::ResourceTitleString;
tokenizer.attempt(
- StateName::TitleStart,
- State::Next(StateName::LabelEndResourceTitleAfter),
+ Name::TitleStart,
+ State::Next(Name::LabelEndResourceTitleAfter),
State::Nok,
)
}
- _ => State::Retry(StateName::LabelEndResourceEnd),
+ _ => State::Retry(Name::LabelEndResourceEnd),
}
}
@@ -483,8 +484,8 @@ pub fn resource_title_after(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab_eol(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::LabelEndResourceEnd),
- State::Next(StateName::LabelEndResourceEnd),
+ State::Next(Name::LabelEndResourceEnd),
+ State::Next(Name::LabelEndResourceEnd),
)
}
@@ -520,8 +521,8 @@ pub fn reference_full(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.token_2 = Token::ReferenceMarker;
tokenizer.tokenize_state.token_3 = Token::ReferenceString;
tokenizer.attempt(
- StateName::LabelStart,
- State::Next(StateName::LabelEndReferenceFullAfter),
+ Name::LabelStart,
+ State::Next(Name::LabelEndReferenceFullAfter),
State::Nok,
)
}
@@ -580,7 +581,7 @@ pub fn reference_collapsed(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::ReferenceMarker);
tokenizer.consume();
tokenizer.exit(Token::ReferenceMarker);
- State::Next(StateName::LabelEndReferenceCollapsedOpen)
+ State::Next(Name::LabelEndReferenceCollapsedOpen)
}
_ => State::Nok,
}
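The `after` state above tries the bracketed forms in a fixed order, with fallbacks gated on whether the label text is a known definition. A hedged sketch of just that ordering, where `resource_ok` and `full_ok` are hypothetical stand-ins for the attempted sub-state-machines:

```rust
#[derive(Debug, PartialEq)]
enum Kind {
    Resource,
    Full,
    Collapsed,
    Shortcut,
}

// `defined` mirrors the definition lookup that gates the reference forms.
fn classify(
    rest: &[u8],
    defined: bool,
    resource_ok: impl FnOnce(&[u8]) -> bool,
    full_ok: impl FnOnce(&[u8]) -> bool,
) -> Option<Kind> {
    match rest.first().copied() {
        // Resource (`[asd](fgh)`)?
        Some(b'(') => {
            if resource_ok(rest) {
                Some(Kind::Resource)
            } else if defined {
                Some(Kind::Shortcut) // the `defined` fallback above
            } else {
                None
            }
        }
        // Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?
        Some(b'[') => {
            if full_ok(rest) {
                Some(Kind::Full)
            } else if defined && rest.get(1) == Some(&b']') {
                Some(Kind::Collapsed)
            } else {
                None
            }
        }
        // Shortcut (`[asd]`) reference?
        _ if defined => Some(Kind::Shortcut),
        _ => None,
    }
}
```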
diff --git a/src/construct/label_start_image.rs b/src/construct/label_start_image.rs
index 629e836..7703ba4 100644
--- a/src/construct/label_start_image.rs
+++ b/src/construct/label_start_image.rs
@@ -29,8 +29,9 @@
//! [html-img]: https://html.spec.whatwg.org/multipage/embedded-content.html#the-img-element
use super::label_end::resolve_media;
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{LabelStart, State, StateName, Tokenizer};
+use crate::tokenizer::{LabelStart, Tokenizer};
/// Start of a label start (image).
///
@@ -45,7 +46,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LabelImageMarker);
tokenizer.consume();
tokenizer.exit(Token::LabelImageMarker);
- State::Next(StateName::LabelStartImageOpen)
+ State::Next(Name::LabelStartImageOpen)
}
_ => State::Nok,
}
diff --git a/src/construct/label_start_link.rs b/src/construct/label_start_link.rs
index 6eb7b40..3ca51bf 100644
--- a/src/construct/label_start_link.rs
+++ b/src/construct/label_start_link.rs
@@ -28,8 +28,9 @@
//! [html-a]: https://html.spec.whatwg.org/multipage/text-level-semantics.html#the-a-element
use super::label_end::resolve_media;
+use crate::state::State;
use crate::token::Token;
-use crate::tokenizer::{LabelStart, State, Tokenizer};
+use crate::tokenizer::{LabelStart, Tokenizer};
/// Start of a label start (link).
///
diff --git a/src/construct/list.rs b/src/construct/list.rs
index d726c73..516cec7 100644
--- a/src/construct/list.rs
+++ b/src/construct/list.rs
@@ -46,8 +46,9 @@
use crate::constant::{LIST_ITEM_VALUE_SIZE_MAX, TAB_SIZE};
use crate::construct::partial_space_or_tab::space_or_tab_min_max;
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{EventType, State, StateName, Tokenizer};
+use crate::tokenizer::{EventType, Tokenizer};
use crate::util::{
skip,
slice::{Position, Slice},
@@ -71,7 +72,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
usize::MAX
},
);
- tokenizer.attempt(name, State::Next(StateName::ListBefore), State::Nok)
+ tokenizer.attempt(name, State::Next(Name::ListBefore), State::Nok)
} else {
State::Nok
}
@@ -87,14 +88,14 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
// Unordered.
Some(b'*' | b'-') => tokenizer.check(
- StateName::ThematicBreakStart,
- State::Next(StateName::ListNok),
- State::Next(StateName::ListBeforeUnordered),
+ Name::ThematicBreakStart,
+ State::Next(Name::ListNok),
+ State::Next(Name::ListBeforeUnordered),
),
- Some(b'+') => State::Retry(StateName::ListBeforeUnordered),
+ Some(b'+') => State::Retry(Name::ListBeforeUnordered),
// Ordered.
- Some(b'0'..=b'9') if !tokenizer.interrupt => State::Retry(StateName::ListBeforeOrdered),
- Some(b'1') => State::Retry(StateName::ListBeforeOrdered),
+ Some(b'0'..=b'9') if !tokenizer.interrupt => State::Retry(Name::ListBeforeOrdered),
+ Some(b'1') => State::Retry(Name::ListBeforeOrdered),
_ => State::Nok,
}
}
@@ -109,7 +110,7 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn before_unordered(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::ListItemPrefix);
- State::Retry(StateName::ListMarker)
+ State::Retry(Name::ListMarker)
}
/// Start of an ordered list item.
@@ -121,7 +122,7 @@ pub fn before_unordered(tokenizer: &mut Tokenizer) -> State {
pub fn before_ordered(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::ListItemPrefix);
tokenizer.enter(Token::ListItemValue);
- State::Retry(StateName::ListValue)
+ State::Retry(Name::ListValue)
}
/// In an ordered list item value.
@@ -134,12 +135,12 @@ pub fn value(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'.' | b')') if !tokenizer.interrupt || tokenizer.tokenize_state.size < 2 => {
tokenizer.exit(Token::ListItemValue);
- State::Retry(StateName::ListMarker)
+ State::Retry(Name::ListMarker)
}
Some(b'0'..=b'9') if tokenizer.tokenize_state.size + 1 < LIST_ITEM_VALUE_SIZE_MAX => {
tokenizer.tokenize_state.size += 1;
tokenizer.consume();
- State::Next(StateName::ListValue)
+ State::Next(Name::ListValue)
}
_ => {
tokenizer.tokenize_state.size = 0;
@@ -160,7 +161,7 @@ pub fn marker(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::ListItemMarker);
tokenizer.consume();
tokenizer.exit(Token::ListItemMarker);
- State::Next(StateName::ListMarkerAfter)
+ State::Next(Name::ListMarkerAfter)
}
/// After a list item marker.
@@ -174,9 +175,9 @@ pub fn marker(tokenizer: &mut Tokenizer) -> State {
pub fn marker_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size = 1;
tokenizer.check(
- StateName::BlankLineStart,
- State::Next(StateName::ListAfter),
- State::Next(StateName::ListMarkerAfterFilled),
+ Name::BlankLineStart,
+ State::Next(Name::ListAfter),
+ State::Next(Name::ListMarkerAfterFilled),
)
}
@@ -191,9 +192,9 @@ pub fn marker_after_filled(tokenizer: &mut Tokenizer) -> State {
// Attempt to parse up to the largest allowed indent, `nok` if there is more whitespace.
tokenizer.attempt(
- StateName::ListWhitespace,
- State::Next(StateName::ListAfter),
- State::Next(StateName::ListPrefixOther),
+ Name::ListWhitespace,
+ State::Next(Name::ListAfter),
+ State::Next(Name::ListPrefixOther),
)
}
@@ -205,11 +206,7 @@ pub fn marker_after_filled(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn whitespace(tokenizer: &mut Tokenizer) -> State {
let name = space_or_tab_min_max(tokenizer, 1, TAB_SIZE);
- tokenizer.attempt(
- name,
- State::Next(StateName::ListWhitespaceAfter),
- State::Nok,
- )
+ tokenizer.attempt(name, State::Next(Name::ListWhitespaceAfter), State::Nok)
}
/// After acceptable whitespace.
@@ -238,7 +235,7 @@ pub fn prefix_other(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::SpaceOrTab);
tokenizer.consume();
tokenizer.exit(Token::SpaceOrTab);
- State::Next(StateName::ListAfter)
+ State::Next(Name::ListAfter)
}
_ => State::Nok,
}
@@ -296,9 +293,9 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn cont_start(tokenizer: &mut Tokenizer) -> State {
tokenizer.check(
- StateName::BlankLineStart,
- State::Next(StateName::ListContBlank),
- State::Next(StateName::ListContFilled),
+ Name::BlankLineStart,
+ State::Next(Name::ListContBlank),
+ State::Next(Name::ListContFilled),
)
}
@@ -320,7 +317,7 @@ pub fn cont_blank(tokenizer: &mut Tokenizer) -> State {
} else {
let name = space_or_tab_min_max(tokenizer, 0, size);
// Consume, optionally, at most `size`.
- tokenizer.attempt(name, State::Next(StateName::ListOk), State::Nok)
+ tokenizer.attempt(name, State::Next(Name::ListOk), State::Nok)
}
}
@@ -340,7 +337,7 @@ pub fn cont_filled(tokenizer: &mut Tokenizer) -> State {
// Consume exactly `size`.
let name = space_or_tab_min_max(tokenizer, size, size);
- tokenizer.attempt(name, State::Next(StateName::ListOk), State::Nok)
+ tokenizer.attempt(name, State::Next(Name::ListOk), State::Nok)
}
/// A state fn to yield [`State::Ok`].
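`cont_blank` and `cont_filled` above differ only in how much of the item's prefix indent they demand: at most `size` on a blank line, exactly `size` otherwise. A hedged sketch of that rule, ignoring tabs and the blank-initial special case:

```rust
// `size` is the list item's prefix width; returns how much indent the
// continuation line consumes, or None if it does not continue the item.
fn cont(line: &[u8], size: usize, blank: bool) -> Option<usize> {
    let indent = line.iter().take_while(|&&b| b == b' ').count();
    if blank {
        Some(indent.min(size)) // consume at most `size`
    } else if indent >= size {
        Some(size) // consume exactly `size`
    } else {
        None // not indented enough: lazy line or end of the item
    }
}
```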
diff --git a/src/construct/paragraph.rs b/src/construct/paragraph.rs
index bac4369..dec25b8 100644
--- a/src/construct/paragraph.rs
+++ b/src/construct/paragraph.rs
@@ -32,8 +32,9 @@
//! [code_text]: crate::construct::code_text
//! [html]: https://html.spec.whatwg.org/multipage/grouping-content.html#the-p-element
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{ContentType, EventType, State, StateName, Tokenizer};
+use crate::tokenizer::{ContentType, EventType, Tokenizer};
use crate::util::skip::opt as skip_opt;
/// Before a paragraph.
@@ -48,7 +49,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
_ => {
tokenizer.enter(Token::Paragraph);
tokenizer.enter_with_content(Token::Data, Some(ContentType::Text));
- State::Retry(StateName::ParagraphInside)
+ State::Retry(Name::ParagraphInside)
}
}
}
@@ -71,7 +72,7 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
}
_ => {
tokenizer.consume();
- State::Next(StateName::ParagraphInside)
+ State::Next(Name::ParagraphInside)
}
}
}
diff --git a/src/construct/partial_bom.rs b/src/construct/partial_bom.rs
index d20c2c7..cca0770 100644
--- a/src/construct/partial_bom.rs
+++ b/src/construct/partial_bom.rs
@@ -10,8 +10,9 @@
//!
//! * [`micromark/lib/preprocess.js` in `micromark`](https://github.com/micromark/micromark/blob/ed23453/packages/micromark/dev/lib/preprocess.js#L54-L60)
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
const BOM: [u8; 3] = [0xEF, 0xBB, 0xBF];
@@ -24,7 +25,7 @@ const BOM: [u8; 3] = [0xEF, 0xBB, 0xBF];
pub fn start(tokenizer: &mut Tokenizer) -> State {
if tokenizer.current == Some(BOM[0]) {
tokenizer.enter(Token::ByteOrderMark);
- State::Retry(StateName::BomInside)
+ State::Retry(Name::BomInside)
} else {
State::Nok
}
@@ -45,7 +46,7 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.size = 0;
State::Ok
} else {
- State::Next(StateName::BomInside)
+ State::Next(Name::BomInside)
}
} else {
tokenizer.tokenize_state.size = 0;
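`BomInside` above walks the three bytes of `BOM`, tracking progress in `tokenize_state.size`. The same check as a stand-alone function:

```rust
const BOM: [u8; 3] = [0xEF, 0xBB, 0xBF];

// Strip a UTF-8 byte order mark from the front of the input, if present.
fn strip_bom(bytes: &[u8]) -> &[u8] {
    if bytes.starts_with(&BOM) {
        &bytes[BOM.len()..]
    } else {
        bytes
    }
}

fn main() {
    assert_eq!(strip_bom(b"\xEF\xBB\xBFhi"), b"hi");
    assert_eq!(strip_bom(b"hi"), b"hi");
}
```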
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index 0ad67c5..c05aaa5 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -6,8 +6,9 @@
//! [string]: crate::content::string
//! [text]: crate::content::text
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{EventType, State, StateName, Tokenizer};
+use crate::tokenizer::{EventType, Tokenizer};
/// At the beginning of data.
///
@@ -21,9 +22,9 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => {
tokenizer.enter(Token::Data);
tokenizer.consume();
- State::Next(StateName::DataInside)
+ State::Next(Name::DataInside)
}
- _ => State::Retry(StateName::DataAtBreak),
+ _ => State::Retry(Name::DataAtBreak),
}
}
@@ -40,7 +41,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::DataAtBreak)
+ State::Next(Name::DataAtBreak)
}
Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => {
tokenizer.register_resolver_before("data".to_string(), Box::new(resolve_data));
@@ -48,7 +49,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
}
_ => {
tokenizer.enter(Token::Data);
- State::Retry(StateName::DataInside)
+ State::Retry(Name::DataInside)
}
}
}
@@ -68,10 +69,10 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
if done {
tokenizer.exit(Token::Data);
- State::Retry(StateName::DataAtBreak)
+ State::Retry(Name::DataAtBreak)
} else {
tokenizer.consume();
- State::Next(StateName::DataInside)
+ State::Next(Name::DataInside)
}
}
diff --git a/src/construct/partial_destination.rs b/src/construct/partial_destination.rs
index 735fb38..5aa0539 100644
--- a/src/construct/partial_destination.rs
+++ b/src/construct/partial_destination.rs
@@ -71,8 +71,9 @@
//! [label_end]: crate::construct::label_end
//! [sanitize_uri]: crate::util::sanitize_uri
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{ContentType, State, StateName, Tokenizer};
+use crate::tokenizer::{ContentType, Tokenizer};
/// Before a destination.
///
@@ -90,7 +91,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
tokenizer.consume();
tokenizer.exit(tokenizer.tokenize_state.token_3.clone());
- State::Next(StateName::DestinationEnclosedBefore)
+ State::Next(Name::DestinationEnclosedBefore)
}
// ASCII control, space, closing paren, but *not* `\0`.
None | Some(0x01..=0x1F | b' ' | b')' | 0x7F) => State::Nok,
@@ -99,7 +100,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(tokenizer.tokenize_state.token_4.clone());
tokenizer.enter(tokenizer.tokenize_state.token_5.clone());
tokenizer.enter_with_content(Token::Data, Some(ContentType::String));
- State::Retry(StateName::DestinationRaw)
+ State::Retry(Name::DestinationRaw)
}
}
}
@@ -121,7 +122,7 @@ pub fn enclosed_before(tokenizer: &mut Tokenizer) -> State {
} else {
tokenizer.enter(tokenizer.tokenize_state.token_5.clone());
tokenizer.enter_with_content(Token::Data, Some(ContentType::String));
- State::Retry(StateName::DestinationEnclosed)
+ State::Retry(Name::DestinationEnclosed)
}
}
@@ -137,15 +138,15 @@ pub fn enclosed(tokenizer: &mut Tokenizer) -> State {
Some(b'>') => {
tokenizer.exit(Token::Data);
tokenizer.exit(tokenizer.tokenize_state.token_5.clone());
- State::Retry(StateName::DestinationEnclosedBefore)
+ State::Retry(Name::DestinationEnclosedBefore)
}
Some(b'\\') => {
tokenizer.consume();
- State::Next(StateName::DestinationEnclosedEscape)
+ State::Next(Name::DestinationEnclosedEscape)
}
_ => {
tokenizer.consume();
- State::Next(StateName::DestinationEnclosed)
+ State::Next(Name::DestinationEnclosed)
}
}
}
@@ -160,9 +161,9 @@ pub fn enclosed_escape(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'<' | b'>' | b'\\') => {
tokenizer.consume();
- State::Next(StateName::DestinationEnclosed)
+ State::Next(Name::DestinationEnclosed)
}
- _ => State::Retry(StateName::DestinationEnclosed),
+ _ => State::Retry(Name::DestinationEnclosed),
}
}
@@ -185,7 +186,7 @@ pub fn raw(tokenizer: &mut Tokenizer) -> State {
Some(b'(') if tokenizer.tokenize_state.size < tokenizer.tokenize_state.size_b => {
tokenizer.consume();
tokenizer.tokenize_state.size += 1;
- State::Next(StateName::DestinationRaw)
+ State::Next(Name::DestinationRaw)
}
// ASCII control (but *not* `\0`) and space and `(`.
None | Some(0x01..=0x1F | b' ' | b'(' | 0x7F) => {
@@ -195,15 +196,15 @@ pub fn raw(tokenizer: &mut Tokenizer) -> State {
Some(b')') => {
tokenizer.consume();
tokenizer.tokenize_state.size -= 1;
- State::Next(StateName::DestinationRaw)
+ State::Next(Name::DestinationRaw)
}
Some(b'\\') => {
tokenizer.consume();
- State::Next(StateName::DestinationRawEscape)
+ State::Next(Name::DestinationRawEscape)
}
Some(_) => {
tokenizer.consume();
- State::Next(StateName::DestinationRaw)
+ State::Next(Name::DestinationRaw)
}
}
}
@@ -218,8 +219,8 @@ pub fn raw_escape(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'(' | b')' | b'\\') => {
tokenizer.consume();
- State::Next(StateName::DestinationRaw)
+ State::Next(Name::DestinationRaw)
}
- _ => State::Retry(StateName::DestinationRaw),
+ _ => State::Retry(Name::DestinationRaw),
}
}
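The `raw` state above counts unescaped `(` into `tokenize_state.size` and refuses to nest past `tokenize_state.size_b` (which callers such as `resource_open` set to `RESOURCE_DESTINATION_BALANCE_MAX`). A hedged stand-alone sketch of that balance rule, with `max` as a stand-in for the cap:

```rust
// Returns the length of a raw destination, or None if it is invalid.
fn raw_destination_len(bytes: &[u8], max: usize) -> Option<usize> {
    let mut depth = 0usize;
    let mut index = 0;
    while let Some(byte) = bytes.get(index).copied() {
        match byte {
            b'(' if depth < max => depth += 1,
            b'(' => return None, // nested too deeply
            b')' if depth == 0 => break, // closes the enclosing resource
            b')' => depth -= 1,
            // Backslash escapes `(`, `)`, and `\`.
            b'\\' if matches!(bytes.get(index + 1).copied(), Some(b'(' | b')' | b'\\')) => {
                index += 1;
            }
            // ASCII control (including DEL) and space end the destination.
            0x00..=0x1F | b' ' | 0x7F => break,
            _ => {}
        }
        index += 1;
    }
    (depth == 0).then_some(index)
}
```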
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index 6447961..6e7c843 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -60,9 +60,10 @@
use super::partial_space_or_tab::{space_or_tab_eol_with_options, EolOptions};
use crate::constant::LINK_REFERENCE_SIZE_MAX;
+use crate::state::{Name, State};
use crate::subtokenize::link;
use crate::token::Token;
-use crate::tokenizer::{ContentType, State, StateName, Tokenizer};
+use crate::tokenizer::{ContentType, Tokenizer};
/// Before a label.
///
@@ -78,7 +79,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.consume();
tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
- State::Next(StateName::LabelAtBreak)
+ State::Next(Name::LabelAtBreak)
}
_ => State::Nok,
}
@@ -111,8 +112,8 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
);
tokenizer.attempt(
name,
- State::Next(StateName::LabelEolAfter),
- State::Next(StateName::LabelAtBlankLine),
+ State::Next(Name::LabelEolAfter),
+ State::Next(Name::LabelAtBlankLine),
)
}
Some(b']') => {
@@ -136,7 +137,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.connect = true;
}
- State::Retry(StateName::LabelInside)
+ State::Retry(Name::LabelInside)
}
}
}
@@ -151,7 +152,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn eol_after(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.connect = true;
- State::Retry(StateName::LabelAtBreak)
+ State::Retry(Name::LabelAtBreak)
}
/// In a label, at a blank line.
@@ -178,12 +179,12 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\n' | b'[' | b']') => {
tokenizer.exit(Token::Data);
- State::Retry(StateName::LabelAtBreak)
+ State::Retry(Name::LabelAtBreak)
}
Some(byte) => {
if tokenizer.tokenize_state.size > LINK_REFERENCE_SIZE_MAX {
tokenizer.exit(Token::Data);
- State::Retry(StateName::LabelAtBreak)
+ State::Retry(Name::LabelAtBreak)
} else {
tokenizer.consume();
tokenizer.tokenize_state.size += 1;
@@ -191,9 +192,9 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.seen = true;
}
State::Next(if matches!(byte, b'\\') {
- StateName::LabelEscape
+ Name::LabelEscape
} else {
- StateName::LabelInside
+ Name::LabelInside
})
}
}
@@ -211,8 +212,8 @@ pub fn escape(tokenizer: &mut Tokenizer) -> State {
Some(b'[' | b'\\' | b']') => {
tokenizer.consume();
tokenizer.tokenize_state.size += 1;
- State::Next(StateName::LabelInside)
+ State::Next(Name::LabelInside)
}
- _ => State::Retry(StateName::LabelInside),
+ _ => State::Retry(Name::LabelInside),
}
}
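A hedged sketch of the constraints `at_break`, `inside`, and `escape` enforce above, ignoring the blank-line rule and the multi-line bookkeeping; `max` stands in for `LINK_REFERENCE_SIZE_MAX`:

```rust
// Find the closing `]` of label text, or None if the label is invalid:
// too long, contains an unescaped `[`, or holds no non-whitespace byte.
fn label_text_end(bytes: &[u8], max: usize) -> Option<usize> {
    let mut index = 0;
    let mut seen = false; // like `tokenize_state.seen`
    while let Some(byte) = bytes.get(index).copied() {
        if index > max {
            return None;
        }
        match byte {
            b']' => return seen.then_some(index),
            b'[' => return None, // no unescaped nested `[`
            b'\\' => {
                // `\[`, `\\`, and `\]` are escapes; the backslash itself
                // still counts as content.
                if matches!(bytes.get(index + 1).copied(), Some(b'[' | b'\\' | b']')) {
                    index += 1;
                }
                seen = true;
            }
            b'\t' | b'\n' | b' ' => {}
            _ => seen = true,
        }
        index += 1;
    }
    None
}
```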
diff --git a/src/construct/partial_non_lazy_continuation.rs b/src/construct/partial_non_lazy_continuation.rs
index 9d19860..497c81e 100644
--- a/src/construct/partial_non_lazy_continuation.rs
+++ b/src/construct/partial_non_lazy_continuation.rs
@@ -10,8 +10,9 @@
//! [code_indented]: crate::construct::code_indented
//! [html_flow]: crate::construct::html_flow
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of continuation.
///
@@ -26,7 +27,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(Token::LineEnding);
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::NonLazyContinuationAfter)
+ State::Next(Name::NonLazyContinuationAfter)
}
_ => State::Nok,
}
diff --git a/src/construct/partial_space_or_tab.rs b/src/construct/partial_space_or_tab.rs
index 5e8c212..a8e8f06 100644
--- a/src/construct/partial_space_or_tab.rs
+++ b/src/construct/partial_space_or_tab.rs
@@ -4,9 +4,10 @@
//!
//! * [`micromark-factory-space/index.js` in `micromark`](https://github.com/micromark/micromark/blob/main/packages/micromark-factory-space/dev/index.js)
+use crate::state::{Name, State};
use crate::subtokenize::link;
use crate::token::Token;
-use crate::tokenizer::{ContentType, State, StateName, Tokenizer};
+use crate::tokenizer::{ContentType, Tokenizer};
/// Options to parse `space_or_tab`.
#[derive(Debug)]
@@ -37,7 +38,7 @@ pub struct EolOptions {
/// ```bnf
/// space_or_tab ::= 1*( ' ' '\t' )
/// ```
-pub fn space_or_tab(tokenizer: &mut Tokenizer) -> StateName {
+pub fn space_or_tab(tokenizer: &mut Tokenizer) -> Name {
space_or_tab_min_max(tokenizer, 1, usize::MAX)
}
@@ -46,7 +47,7 @@ pub fn space_or_tab(tokenizer: &mut Tokenizer) -> StateName {
/// ```bnf
/// space_or_tab_min_max ::= x*y( ' ' '\t' )
/// ```
-pub fn space_or_tab_min_max(tokenizer: &mut Tokenizer, min: usize, max: usize) -> StateName {
+pub fn space_or_tab_min_max(tokenizer: &mut Tokenizer, min: usize, max: usize) -> Name {
space_or_tab_with_options(
tokenizer,
Options {
@@ -60,13 +61,13 @@ pub fn space_or_tab_min_max(tokenizer: &mut Tokenizer, min: usize, max: usize) -
}
/// `space_or_tab`, with the given options.
-pub fn space_or_tab_with_options(tokenizer: &mut Tokenizer, options: Options) -> StateName {
+pub fn space_or_tab_with_options(tokenizer: &mut Tokenizer, options: Options) -> Name {
tokenizer.tokenize_state.space_or_tab_connect = options.connect;
tokenizer.tokenize_state.space_or_tab_content_type = options.content_type;
tokenizer.tokenize_state.space_or_tab_min = options.min;
tokenizer.tokenize_state.space_or_tab_max = options.max;
tokenizer.tokenize_state.space_or_tab_token = options.kind;
- StateName::SpaceOrTabStart
+ Name::SpaceOrTabStart
}
/// `space_or_tab`, or optionally `space_or_tab`, one `eol`, and
@@ -75,7 +76,7 @@ pub fn space_or_tab_with_options(tokenizer: &mut Tokenizer, options: Options) ->
/// ```bnf
/// space_or_tab_eol ::= 1*( ' ' '\t' ) | 0*( ' ' '\t' ) eol 0*( ' ' '\t' )
/// ```
-pub fn space_or_tab_eol(tokenizer: &mut Tokenizer) -> StateName {
+pub fn space_or_tab_eol(tokenizer: &mut Tokenizer) -> Name {
space_or_tab_eol_with_options(
tokenizer,
EolOptions {
@@ -86,10 +87,10 @@ pub fn space_or_tab_eol(tokenizer: &mut Tokenizer) -> StateName {
}
/// `space_or_tab_eol`, with the given options.
-pub fn space_or_tab_eol_with_options(tokenizer: &mut Tokenizer, options: EolOptions) -> StateName {
+pub fn space_or_tab_eol_with_options(tokenizer: &mut Tokenizer, options: EolOptions) -> Name {
tokenizer.tokenize_state.space_or_tab_eol_content_type = options.content_type;
tokenizer.tokenize_state.space_or_tab_eol_connect = options.connect;
- StateName::SpaceOrTabEolStart
+ Name::SpaceOrTabEolStart
}
/// Before `space_or_tab`.
@@ -113,9 +114,9 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.space_or_tab_connect = true;
}
- State::Retry(StateName::SpaceOrTabInside)
+ State::Retry(Name::SpaceOrTabInside)
}
- _ => State::Retry(StateName::SpaceOrTabAfter),
+ _ => State::Retry(Name::SpaceOrTabAfter),
}
}
@@ -133,11 +134,11 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
{
tokenizer.consume();
tokenizer.tokenize_state.space_or_tab_size += 1;
- State::Next(StateName::SpaceOrTabInside)
+ State::Next(Name::SpaceOrTabInside)
}
_ => {
tokenizer.exit(tokenizer.tokenize_state.space_or_tab_token.clone());
- State::Retry(StateName::SpaceOrTabAfter)
+ State::Retry(Name::SpaceOrTabAfter)
}
}
}
@@ -182,8 +183,8 @@ pub fn eol_start(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
name,
- State::Next(StateName::SpaceOrTabEolAfterFirst),
- State::Next(StateName::SpaceOrTabEolAtEol),
+ State::Next(Name::SpaceOrTabEolAfterFirst),
+ State::Next(Name::SpaceOrTabEolAtEol),
)
}
@@ -198,7 +199,7 @@ pub fn eol_after_first(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.space_or_tab_eol_connect = true;
}
- State::Retry(StateName::SpaceOrTabEolAtEol)
+ State::Retry(Name::SpaceOrTabEolAtEol)
}
/// `space_or_tab_eol`: after an optional first `space_or_tab`.
@@ -231,7 +232,7 @@ pub fn eol_at_eol(tokenizer: &mut Tokenizer) -> State {
tokenizer.consume();
tokenizer.exit(Token::LineEnding);
- State::Next(StateName::SpaceOrTabEolAfterEol)
+ State::Next(Name::SpaceOrTabEolAfterEol)
} else {
let ok = tokenizer.tokenize_state.space_or_tab_eol_ok;
tokenizer.tokenize_state.space_or_tab_eol_content_type = None;
@@ -269,8 +270,8 @@ pub fn eol_after_eol(tokenizer: &mut Tokenizer) -> State {
);
tokenizer.attempt(
name,
- State::Next(StateName::SpaceOrTabEolAfterMore),
- State::Next(StateName::SpaceOrTabEolAfterMore),
+ State::Next(Name::SpaceOrTabEolAfterMore),
+ State::Next(Name::SpaceOrTabEolAfterMore),
)
}
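Note the changed return type on these factories: they stash their options on `tokenize_state` and return a plain `Name` for later dispatch, rather than handing back a closure. A sketch of that shape, with a hypothetical subset of the real fields; one plausible payoff is that a fieldless `Name` is `Copy` and trivially storable wherever the tokenizer queues states:

```rust
#[derive(Clone, Copy, Debug)]
enum Name {
    SpaceOrTabStart,
}

#[derive(Default)]
struct TokenizeState {
    space_or_tab_min: usize,
    space_or_tab_max: usize,
}

#[derive(Default)]
struct Tokenizer {
    tokenize_state: TokenizeState,
}

// The factory configures shared state, then names the entry state.
fn space_or_tab_min_max(tokenizer: &mut Tokenizer, min: usize, max: usize) -> Name {
    tokenizer.tokenize_state.space_or_tab_min = min;
    tokenizer.tokenize_state.space_or_tab_max = max;
    Name::SpaceOrTabStart
}

fn main() {
    let mut tokenizer = Tokenizer::default();
    let name = space_or_tab_min_max(&mut tokenizer, 1, usize::MAX);
    println!("{:?}", name); // a cheap, copyable handle, not a closure
}
```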
diff --git a/src/construct/partial_title.rs b/src/construct/partial_title.rs
index 209240e..11c28bd 100644
--- a/src/construct/partial_title.rs
+++ b/src/construct/partial_title.rs
@@ -31,9 +31,10 @@
//! [label_end]: crate::construct::label_end
use crate::construct::partial_space_or_tab::{space_or_tab_eol_with_options, EolOptions};
+use crate::state::{Name, State};
use crate::subtokenize::link;
use crate::token::Token;
-use crate::tokenizer::{ContentType, State, StateName, Tokenizer};
+use crate::tokenizer::{ContentType, Tokenizer};
/// Before a title.
///
@@ -50,7 +51,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
tokenizer.consume();
tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
- State::Next(StateName::TitleBegin)
+ State::Next(Name::TitleBegin)
}
_ => State::Nok,
}
@@ -79,7 +80,7 @@ pub fn begin(tokenizer: &mut Tokenizer) -> State {
}
_ => {
tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
- State::Retry(StateName::TitleAtBreak)
+ State::Retry(Name::TitleAtBreak)
}
}
}
@@ -108,15 +109,15 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(
name,
- State::Next(StateName::TitleAfterEol),
- State::Next(StateName::TitleAtBlankLine),
+ State::Next(Name::TitleAfterEol),
+ State::Next(Name::TitleAtBlankLine),
)
}
Some(b'"' | b'\'' | b')')
if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker =>
{
tokenizer.exit(tokenizer.tokenize_state.token_3.clone());
- State::Retry(StateName::TitleBegin)
+ State::Retry(Name::TitleBegin)
}
Some(_) => {
tokenizer.enter_with_content(Token::Data, Some(ContentType::String));
@@ -128,7 +129,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.connect = true;
}
- State::Retry(StateName::TitleInside)
+ State::Retry(Name::TitleInside)
}
}
}
@@ -142,7 +143,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
/// ```
pub fn after_eol(tokenizer: &mut Tokenizer) -> State {
tokenizer.tokenize_state.connect = true;
- State::Retry(StateName::TitleAtBreak)
+ State::Retry(Name::TitleAtBreak)
}
/// In a title, at a blank line.
@@ -169,20 +170,20 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None | Some(b'\n') => {
tokenizer.exit(Token::Data);
- State::Retry(StateName::TitleAtBreak)
+ State::Retry(Name::TitleAtBreak)
}
Some(b'"' | b'\'' | b')')
if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker =>
{
tokenizer.exit(Token::Data);
- State::Retry(StateName::TitleAtBreak)
+ State::Retry(Name::TitleAtBreak)
}
Some(byte) => {
tokenizer.consume();
State::Next(if matches!(byte, b'\\') {
- StateName::TitleEscape
+ Name::TitleEscape
} else {
- StateName::TitleInside
+ Name::TitleInside
})
}
}
@@ -198,8 +199,8 @@ pub fn escape(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'"' | b'\'' | b')') => {
tokenizer.consume();
- State::Next(StateName::TitleInside)
+ State::Next(Name::TitleInside)
}
- _ => State::Retry(StateName::TitleInside),
+ _ => State::Retry(Name::TitleInside),
}
}
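`at_break` and `inside` above run until an unescaped closing marker that matches the opener stored in `tokenize_state.marker`. A stand-alone sketch of just that matching, assuming single-line input:

```rust
// Find the closing marker of a title opened with `"`, `'`, or `(`.
fn title_end(bytes: &[u8], opener: u8) -> Option<usize> {
    let closer = if opener == b'(' { b')' } else { opener };
    let mut index = 0;
    while let Some(byte) = bytes.get(index).copied() {
        match byte {
            // Escapes apply to the three closer candidates, as in `escape`.
            b'\\' if matches!(bytes.get(index + 1).copied(), Some(b'"' | b'\'' | b')')) => {
                index += 1;
            }
            _ if byte == closer => return Some(index),
            _ => {}
        }
        index += 1;
    }
    None
}

fn main() {
    assert_eq!(title_end(b"a\\'b'", b'\''), Some(4));
    assert_eq!(title_end(b"open", b'"'), None);
}
```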
diff --git a/src/construct/thematic_break.rs b/src/construct/thematic_break.rs
index 288d818..fc71d73 100644
--- a/src/construct/thematic_break.rs
+++ b/src/construct/thematic_break.rs
@@ -50,8 +50,9 @@
use super::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
use crate::constant::{TAB_SIZE, THEMATIC_BREAK_MARKER_COUNT_MIN};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
/// Start of a thematic break.
///
@@ -72,11 +73,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
},
);
- tokenizer.attempt(
- name,
- State::Next(StateName::ThematicBreakBefore),
- State::Nok,
- )
+ tokenizer.attempt(name, State::Next(Name::ThematicBreakBefore), State::Nok)
} else {
State::Nok
}
@@ -92,7 +89,7 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
Some(b'*' | b'-' | b'_') => {
tokenizer.tokenize_state.marker = tokenizer.current.unwrap();
- State::Retry(StateName::ThematicBreakAtBreak)
+ State::Retry(Name::ThematicBreakAtBreak)
}
_ => State::Nok,
}
@@ -118,7 +115,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker =>
{
tokenizer.enter(Token::ThematicBreakSequence);
- State::Retry(StateName::ThematicBreakSequence)
+ State::Retry(Name::ThematicBreakSequence)
}
_ => {
tokenizer.tokenize_state.marker = 0;
@@ -141,15 +138,15 @@ pub fn sequence(tokenizer: &mut Tokenizer) -> State {
{
tokenizer.consume();
tokenizer.tokenize_state.size += 1;
- State::Next(StateName::ThematicBreakSequence)
+ State::Next(Name::ThematicBreakSequence)
}
_ => {
tokenizer.exit(Token::ThematicBreakSequence);
let name = space_or_tab(tokenizer);
tokenizer.attempt(
name,
- State::Next(StateName::ThematicBreakAtBreak),
- State::Next(StateName::ThematicBreakAtBreak),
+ State::Next(Name::ThematicBreakAtBreak),
+ State::Next(Name::ThematicBreakAtBreak),
)
}
}
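A stand-alone sketch of the grammar these states implement: at least `THEMATIC_BREAK_MARKER_COUNT_MIN` (3, per CommonMark) of the same marker, with optional spaces and tabs between runs and nothing else on the line:

```rust
fn is_thematic_break(line: &[u8]) -> bool {
    let mut marker = 0u8; // like `tokenize_state.marker`
    let mut count = 0usize;
    for &byte in line {
        match byte {
            b'*' | b'-' | b'_' if marker == 0 || byte == marker => {
                marker = byte;
                count += 1;
            }
            b'\t' | b' ' => {}
            _ => return false, // foreign byte, or a mismatched marker
        }
    }
    count >= 3
}

fn main() {
    assert!(is_thematic_break(b"***"));
    assert!(is_thematic_break(b"- - -"));
    assert!(!is_thematic_break(b"**"));
    assert!(!is_thematic_break(b"*-*"));
}
```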