path: root/src/construct/partial_destination.rs
author    Titus Wormer <tituswormer@gmail.com>  2022-08-10 09:16:36 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-08-10 09:16:41 +0200
commit  8162222295d71ea7fd9270c7b3b9497b91db3f1f (patch)
tree    77a42dab6775450836ba0ca1b5d0d2360e506d52 /src/construct/partial_destination.rs
parent  ed2e62f99ca9cf594c677e47df9d954309b43294 (diff)
Rename `State::Fn` to `State::Next`
Diffstat (limited to 'src/construct/partial_destination.rs')
-rw-r--r--  src/construct/partial_destination.rs | 18
1 file changed, 9 insertions(+), 9 deletions(-)
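
The change is a mechanical rename: every state function that previously returned `State::Fn(StateName::…)` now returns `State::Next(StateName::…)`, naming the state that should run once the current byte has been handled. As a rough, hedged illustration of that shape, the following self-contained sketch uses simplified stand-ins: `State`, `StateName`, `Tokenizer`, and the hand-rolled driver loop below are not the crate's real types or dispatcher.

// A minimal, self-contained sketch (not the crate's real code): a state
// function returns `State::Next(...)` to name the state that runs next.

#[derive(Clone, Copy, Debug)]
enum StateName {
    DestinationEnclosed,
    DestinationEnclosedEscape,
}

#[derive(Debug)]
enum State {
    /// Formerly `State::Fn`: continue with this named state.
    Next(StateName),
    Ok,
    Nok,
}

struct Tokenizer {
    bytes: Vec<u8>,
    index: usize,
}

impl Tokenizer {
    fn current(&self) -> Option<u8> {
        self.bytes.get(self.index).copied()
    }
    fn consume(&mut self) {
        self.index += 1;
    }
}

// Roughly the shape of `enclosed` in the diff below, with token events left out.
fn enclosed(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current() {
        None => State::Nok,
        Some(b'>') => State::Ok,
        Some(b'\\') => {
            tokenizer.consume();
            State::Next(StateName::DestinationEnclosedEscape)
        }
        _ => {
            tokenizer.consume();
            State::Next(StateName::DestinationEnclosed)
        }
    }
}

fn main() {
    let mut tokenizer = Tokenizer {
        bytes: b"a\\>b>".to_vec(),
        index: 0,
    };
    // A hand-rolled driver: resolve each `State::Next` into another call.
    let mut state = enclosed(&mut tokenizer);
    while let State::Next(name) = state {
        state = match name {
            StateName::DestinationEnclosed => enclosed(&mut tokenizer),
            StateName::DestinationEnclosedEscape => {
                // In this sketch the escaped byte is accepted unconditionally.
                tokenizer.consume();
                State::Next(StateName::DestinationEnclosed)
            }
        };
    }
    println!("{state:?}"); // `Ok` once the closing `>` is reached.
}

Under that reading, `Next` describes the returned value as data (the name of the next state) rather than as a stored function, which is what the old `Fn` name suggested.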
diff --git a/src/construct/partial_destination.rs b/src/construct/partial_destination.rs
index e8818a0..e4cfdc3 100644
--- a/src/construct/partial_destination.rs
+++ b/src/construct/partial_destination.rs
@@ -90,7 +90,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
             tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
             tokenizer.consume();
             tokenizer.exit(tokenizer.tokenize_state.token_3.clone());
-            State::Fn(StateName::DestinationEnclosedBefore)
+            State::Next(StateName::DestinationEnclosedBefore)
         }
         // ASCII control, space, closing paren, but *not* `\0`.
         None | Some(0x01..=0x1F | b' ' | b')' | 0x7F) => State::Nok,
@@ -141,11 +141,11 @@ pub fn enclosed(tokenizer: &mut Tokenizer) -> State {
         }
         Some(b'\\') => {
             tokenizer.consume();
-            State::Fn(StateName::DestinationEnclosedEscape)
+            State::Next(StateName::DestinationEnclosedEscape)
         }
         _ => {
             tokenizer.consume();
-            State::Fn(StateName::DestinationEnclosed)
+            State::Next(StateName::DestinationEnclosed)
         }
     }
 }
@@ -160,7 +160,7 @@ pub fn enclosed_escape(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         Some(b'<' | b'>' | b'\\') => {
             tokenizer.consume();
-            State::Fn(StateName::DestinationEnclosed)
+            State::Next(StateName::DestinationEnclosed)
         }
         _ => enclosed(tokenizer),
     }
@@ -185,7 +185,7 @@ pub fn raw(tokenizer: &mut Tokenizer) -> State {
         Some(b'(') if tokenizer.tokenize_state.size < tokenizer.tokenize_state.size_other => {
             tokenizer.consume();
             tokenizer.tokenize_state.size += 1;
-            State::Fn(StateName::DestinationRaw)
+            State::Next(StateName::DestinationRaw)
         }
         // ASCII control (but *not* `\0`) and space and `(`.
         None | Some(0x01..=0x1F | b' ' | b'(' | 0x7F) => {
@@ -195,15 +195,15 @@ pub fn raw(tokenizer: &mut Tokenizer) -> State {
         Some(b')') => {
             tokenizer.consume();
             tokenizer.tokenize_state.size -= 1;
-            State::Fn(StateName::DestinationRaw)
+            State::Next(StateName::DestinationRaw)
         }
         Some(b'\\') => {
             tokenizer.consume();
-            State::Fn(StateName::DestinationRawEscape)
+            State::Next(StateName::DestinationRawEscape)
         }
         Some(_) => {
             tokenizer.consume();
-            State::Fn(StateName::DestinationRaw)
+            State::Next(StateName::DestinationRaw)
         }
     }
 }
@@ -218,7 +218,7 @@ pub fn raw_escape(tokenizer: &mut Tokenizer) -> State {
     match tokenizer.current {
         Some(b'(' | b')' | b'\\') => {
             tokenizer.consume();
-            State::Fn(StateName::DestinationRaw)
+            State::Next(StateName::DestinationRaw)
         }
         _ => raw(tokenizer),
     }
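
Beyond the rename, the two `raw` hunks above show the parenthesis bookkeeping that surrounds those returns: `tokenize_state.size` is incremented for `(` only while it stays below `tokenize_state.size_other`, and decremented for `)`. A minimal sketch of that counting idea follows; the function name, the `max_depth` parameter, and the return convention are illustrative assumptions, not the crate's API.

// A hedged sketch of the counting idea only; `balanced_raw_end` and
// `max_depth` are illustrative names, not part of markdown-rs.

/// Scan `bytes` as a raw destination and return the index where it ends,
/// or `None` (think `State::Nok`) if `(` nesting exceeds `max_depth`.
fn balanced_raw_end(bytes: &[u8], max_depth: usize) -> Option<usize> {
    // `depth` plays the role of `tokenizer.tokenize_state.size` above.
    let mut depth = 0;
    let mut index = 0;
    while index < bytes.len() {
        match bytes[index] {
            b'(' if depth < max_depth => depth += 1,
            b'(' => return None,                // nested too deeply: reject
            b')' if depth == 0 => break,        // unbalanced `)` ends the destination
            b')' => depth -= 1,
            b'\\' => index += 1,                // escape: take the next byte literally
            b' ' | 0x01..=0x1F | 0x7F => break, // space/control (not `\0`) ends it
            _ => {}
        }
        index += 1;
    }
    Some(index.min(bytes.len()))
}

fn main() {
    // `(b)` is balanced, so the destination runs until the space.
    assert_eq!(balanced_raw_end(b"a(b)c rest", 32), Some(5));
    // An escaped `)` closes nothing.
    assert_eq!(balanced_raw_end(b"a\\)b rest", 32), Some(4));
    // Exceeding the nesting limit rejects the whole destination.
    assert_eq!(balanced_raw_end(b"((((", 3), None);
    println!("raw destination sketch: ok");
}

Here `None` stands in for the `State::Nok` outcome and `Some(index)` marks where the raw destination stops; the crate itself expresses those same outcomes through the state returns shown in the diff.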