aboutsummaryrefslogtreecommitdiffstats
path: root/src/construct/partial_data.rs
diff options
context:
space:
mode:
authorLibravatar Titus Wormer <tituswormer@gmail.com>2022-08-12 17:02:01 +0200
committerLibravatar Titus Wormer <tituswormer@gmail.com>2022-08-12 17:02:01 +0200
commit6ba11bdaca1721fb4591819604c340d147798f45 (patch)
treeef602b518043c0a7228e76d9d00bee95a17798d4 /src/construct/partial_data.rs
parent504729a4a0c8f3e0d8fc9159e0273150b169e184 (diff)
downloadmarkdown-rs-6ba11bdaca1721fb4591819604c340d147798f45.tar.gz
markdown-rs-6ba11bdaca1721fb4591819604c340d147798f45.tar.bz2
markdown-rs-6ba11bdaca1721fb4591819604c340d147798f45.zip
Remove `match` statements if clumsy
Diffstat (limited to 'src/construct/partial_data.rs')
-rw-r--r--src/construct/partial_data.rs57
1 file changed, 26 insertions, 31 deletions
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index b6b0f59..8c8ecbb 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -18,15 +18,16 @@ use crate::tokenizer::Tokenizer;
/// ^
/// ```
pub fn start(tokenizer: &mut Tokenizer) -> State {
- match tokenizer.current {
- // Make sure to eat the first `markers`.
- Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => {
+ // Make sure to eat the first `markers`.
+ if let Some(byte) = tokenizer.current {
+ if tokenizer.tokenize_state.markers.contains(&byte) {
tokenizer.enter(Name::Data);
tokenizer.consume();
- State::Next(StateName::DataInside)
+ return State::Next(StateName::DataInside);
}
- _ => State::Retry(StateName::DataAtBreak),
}
+
+ State::Retry(StateName::DataAtBreak)
}
/// Before something.
@@ -36,23 +37,21 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
pub fn at_break(tokenizer: &mut Tokenizer) -> State {
- match tokenizer.current {
- None => State::Ok,
- Some(b'\n') => {
- tokenizer.enter(Name::LineEnding);
- tokenizer.consume();
- tokenizer.exit(Name::LineEnding);
- State::Next(StateName::DataAtBreak)
- }
- Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => {
- tokenizer.register_resolver_before(ResolveName::Data);
- State::Ok
- }
- _ => {
+ if let Some(byte) = tokenizer.current {
+ if !tokenizer.tokenize_state.markers.contains(&byte) {
+ if byte == b'\n' {
+ tokenizer.enter(Name::LineEnding);
+ tokenizer.consume();
+ tokenizer.exit(Name::LineEnding);
+ return State::Next(StateName::DataAtBreak);
+ }
tokenizer.enter(Name::Data);
- State::Retry(StateName::DataInside)
+ return State::Retry(StateName::DataInside);
}
}
+
+ tokenizer.register_resolver_before(ResolveName::Data);
+ State::Ok
}
/// In data.
@@ -62,19 +61,15 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
/// ^^^
/// ```
pub fn inside(tokenizer: &mut Tokenizer) -> State {
- let done = match tokenizer.current {
- None | Some(b'\n') => true,
- Some(byte) if tokenizer.tokenize_state.markers.contains(&byte) => true,
- _ => false,
- };
-
- if done {
- tokenizer.exit(Name::Data);
- State::Retry(StateName::DataAtBreak)
- } else {
- tokenizer.consume();
- State::Next(StateName::DataInside)
+ if let Some(byte) = tokenizer.current {
+ if byte != b'\n' && !tokenizer.tokenize_state.markers.contains(&byte) {
+ tokenizer.consume();
+ return State::Next(StateName::DataInside);
+ }
}
+
+ tokenizer.exit(Name::Data);
+ State::Retry(StateName::DataAtBreak)
}
/// Merge adjacent data events.