path: root/src/construct/character_reference.rs
author     Titus Wormer <tituswormer@gmail.com>   2022-08-11 13:45:24 +0200
committer  Titus Wormer <tituswormer@gmail.com>   2022-08-11 13:45:24 +0200
commit     6eb2f644057f371841fe25330a57ee185f91c7af (patch)
tree       7b4d02586339d1a7f82104b4473d9ac243b3abf9 /src/construct/character_reference.rs
parent     2d35cbfceace81a217cd0fbdae7a8777c7a6465e (diff)
download   markdown-rs-6eb2f644057f371841fe25330a57ee185f91c7af.tar.gz
           markdown-rs-6eb2f644057f371841fe25330a57ee185f91c7af.tar.bz2
           markdown-rs-6eb2f644057f371841fe25330a57ee185f91c7af.zip
Refactor to move some code to `state.rs`
Diffstat (limited to 'src/construct/character_reference.rs')
-rw-r--r--  src/construct/character_reference.rs  19
1 file changed, 10 insertions(+), 9 deletions(-)
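The change is mechanical: the `State` enum and the state-name enum (renamed from `StateName` to `Name`) now come from the new `state.rs` module instead of `tokenizer.rs`, so this file only adjusts its imports and the paths it names. A minimal sketch of the shape `src/state.rs` presumably has after the move, trimmed to the variants visible in this diff (everything beyond those is illustrative, not taken from the patch):

// Sketch of the assumed shape of `src/state.rs` after this refactor; the real
// enums cover every state and construct in the crate, not just these three.

/// Names of tokenizer states (previously `StateName` in `tokenizer.rs`).
pub enum Name {
    CharacterReferenceOpen,
    CharacterReferenceNumeric,
    CharacterReferenceValue,
    // ...one variant per state function in the crate.
}

/// Result of running a state function.
pub enum State {
    /// Move to this state at the next byte.
    Next(Name),
    /// Run this state again with the current byte (nothing was consumed).
    Retry(Name),
    /// The construct did not match.
    Nok,
    // The real enum presumably also has a success variant.
}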
diff --git a/src/construct/character_reference.rs b/src/construct/character_reference.rs
index e1c7e79..435c115 100644
--- a/src/construct/character_reference.rs
+++ b/src/construct/character_reference.rs
@@ -65,8 +65,9 @@ use crate::constant::{
    CHARACTER_REFERENCES, CHARACTER_REFERENCE_DECIMAL_SIZE_MAX,
    CHARACTER_REFERENCE_HEXADECIMAL_SIZE_MAX, CHARACTER_REFERENCE_NAMED_SIZE_MAX,
};
+use crate::state::{Name, State};
use crate::token::Token;
-use crate::tokenizer::{State, StateName, Tokenizer};
+use crate::tokenizer::Tokenizer;
use crate::util::slice::Slice;
/// Start of a character reference.
@@ -86,7 +87,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
            tokenizer.enter(Token::CharacterReferenceMarker);
            tokenizer.consume();
            tokenizer.exit(Token::CharacterReferenceMarker);
-            State::Next(StateName::CharacterReferenceOpen)
+            State::Next(Name::CharacterReferenceOpen)
        }
        _ => State::Nok,
    }
@@ -103,17 +104,17 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
/// > | a&#x9;b
/// ^
/// ```
-// StateName::CharacterReferenceOpen
+// Name::CharacterReferenceOpen
pub fn open(tokenizer: &mut Tokenizer) -> State {
    if let Some(b'#') = tokenizer.current {
        tokenizer.enter(Token::CharacterReferenceMarkerNumeric);
        tokenizer.consume();
        tokenizer.exit(Token::CharacterReferenceMarkerNumeric);
-        State::Next(StateName::CharacterReferenceNumeric)
+        State::Next(Name::CharacterReferenceNumeric)
    } else {
        tokenizer.tokenize_state.marker = b'&';
        tokenizer.enter(Token::CharacterReferenceValue);
-        State::Retry(StateName::CharacterReferenceValue)
+        State::Retry(Name::CharacterReferenceValue)
    }
}
@@ -126,7 +127,7 @@ pub fn open(tokenizer: &mut Tokenizer) -> State {
/// > | a&#x9;b
/// ^
/// ```
-// StateName::CharacterReferenceNumeric
+// Name::CharacterReferenceNumeric
pub fn numeric(tokenizer: &mut Tokenizer) -> State {
    if let Some(b'x' | b'X') = tokenizer.current {
        tokenizer.enter(Token::CharacterReferenceMarkerHexadecimal);
@@ -134,11 +135,11 @@ pub fn numeric(tokenizer: &mut Tokenizer) -> State {
        tokenizer.exit(Token::CharacterReferenceMarkerHexadecimal);
        tokenizer.enter(Token::CharacterReferenceValue);
        tokenizer.tokenize_state.marker = b'x';
-        State::Next(StateName::CharacterReferenceValue)
+        State::Next(Name::CharacterReferenceValue)
    } else {
        tokenizer.enter(Token::CharacterReferenceValue);
        tokenizer.tokenize_state.marker = b'#';
-        State::Retry(StateName::CharacterReferenceValue)
+        State::Retry(Name::CharacterReferenceValue)
    }
}
@@ -202,7 +203,7 @@ pub fn value(tokenizer: &mut Tokenizer) -> State {
        if tokenizer.tokenize_state.size < max && test(&byte) {
            tokenizer.tokenize_state.size += 1;
            tokenizer.consume();
-            return State::Next(StateName::CharacterReferenceValue);
+            return State::Next(Name::CharacterReferenceValue);
        }
    }
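A state name on its own is inert; something in the crate has to map each `Name` back to the function that implements it, and that mapping is presumably part of the code that moved into `state.rs`. A hypothetical dispatcher in that spirit, limited to the states touched by this diff (the function name `call`, the `todo!` arm, and the exact paths are assumptions, not taken from the patch):

use crate::construct::character_reference;
use crate::state::{Name, State};
use crate::tokenizer::Tokenizer;

/// Hypothetical dispatcher: resolve a state `Name` to the state function that
/// implements it and run it against the tokenizer.
pub fn call(tokenizer: &mut Tokenizer, name: Name) -> State {
    match name {
        Name::CharacterReferenceOpen => character_reference::open(tokenizer),
        Name::CharacterReferenceNumeric => character_reference::numeric(tokenizer),
        Name::CharacterReferenceValue => character_reference::value(tokenizer),
        // Every other `Name` variant would map to its own construct function.
        _ => todo!("remaining states omitted from this sketch"),
    }
}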