about summary refs log tree commit diff stats
path: root/src/construct/partial_title.rs
diff options
context:
space:
mode:
author: Titus Wormer <tituswormer@gmail.com> 2022-08-12 19:04:31 +0200
committer: Titus Wormer <tituswormer@gmail.com> 2022-08-12 19:04:31 +0200
commit 395b13daf6dd6da0204302d344caa710ea891d62 (patch)
tree 4a7c688af7a70c7e3b694d87ba66e01dd0670cf6 /src/construct/partial_title.rs
parent 6dc2011d69c85820feddf6799142d304cc2eeb29 (diff)
download: markdown-rs-395b13daf6dd6da0204302d344caa710ea891d62.tar.gz
markdown-rs-395b13daf6dd6da0204302d344caa710ea891d62.tar.bz2
markdown-rs-395b13daf6dd6da0204302d344caa710ea891d62.zip
Refactor to attempt less if never needed
Diffstat (limited to 'src/construct/partial_title.rs')
-rw-r--r-- src/construct/partial_title.rs 84
1 file changed, 34 insertions, 50 deletions
diff --git a/src/construct/partial_title.rs b/src/construct/partial_title.rs
index 93dbd28..f0c4931 100644
--- a/src/construct/partial_title.rs
+++ b/src/construct/partial_title.rs
@@ -66,22 +66,17 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
pub fn begin(tokenizer: &mut Tokenizer) -> State {
- match tokenizer.current {
- Some(b'"' | b'\'' | b')')
- if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker =>
- {
- tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
- tokenizer.consume();
- tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
- tokenizer.exit(tokenizer.tokenize_state.token_1.clone());
- tokenizer.tokenize_state.marker = 0;
- tokenizer.tokenize_state.connect = false;
- State::Ok
- }
- _ => {
- tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
- State::Retry(StateName::TitleAtBreak)
- }
+ if tokenizer.current == Some(tokenizer.tokenize_state.marker) {
+ tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
+ tokenizer.consume();
+ tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
+ tokenizer.exit(tokenizer.tokenize_state.token_1.clone());
+ tokenizer.tokenize_state.marker = 0;
+ tokenizer.tokenize_state.connect = false;
+ State::Ok
+ } else {
+ tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
+ State::Retry(StateName::TitleAtBreak)
}
}
@@ -92,13 +87,11 @@ pub fn begin(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
pub fn at_break(tokenizer: &mut Tokenizer) -> State {
- match tokenizer.current {
- None => {
- tokenizer.tokenize_state.marker = 0;
- tokenizer.tokenize_state.connect = false;
- State::Nok
- }
- Some(b'\n') => {
+ if let Some(byte) = tokenizer.current {
+ if byte == tokenizer.tokenize_state.marker {
+ tokenizer.exit(tokenizer.tokenize_state.token_3.clone());
+ State::Retry(StateName::TitleBegin)
+ } else if byte == b'\n' {
tokenizer.attempt(
State::Next(StateName::TitleAfterEol),
State::Next(StateName::TitleAtBlankLine),
@@ -110,14 +103,7 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
connect: tokenizer.tokenize_state.connect,
},
))
- }
- Some(b'"' | b'\'' | b')')
- if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker =>
- {
- tokenizer.exit(tokenizer.tokenize_state.token_3.clone());
- State::Retry(StateName::TitleBegin)
- }
- Some(_) => {
+ } else {
tokenizer.enter_link(
Name::Data,
Link {
@@ -136,6 +122,10 @@ pub fn at_break(tokenizer: &mut Tokenizer) -> State {
State::Retry(StateName::TitleInside)
}
+ } else {
+ tokenizer.tokenize_state.marker = 0;
+ tokenizer.tokenize_state.connect = false;
+ State::Nok
}
}
@@ -172,25 +162,19 @@ pub fn at_blank_line(tokenizer: &mut Tokenizer) -> State {
/// ^
/// ```
pub fn inside(tokenizer: &mut Tokenizer) -> State {
- match tokenizer.current {
- None | Some(b'\n') => {
- tokenizer.exit(Name::Data);
- State::Retry(StateName::TitleAtBreak)
- }
- Some(b'"' | b'\'' | b')')
- if tokenizer.current.unwrap() == tokenizer.tokenize_state.marker =>
- {
- tokenizer.exit(Name::Data);
- State::Retry(StateName::TitleAtBreak)
- }
- Some(byte) => {
- tokenizer.consume();
- State::Next(if matches!(byte, b'\\') {
- StateName::TitleEscape
- } else {
- StateName::TitleInside
- })
- }
+ if tokenizer.current == Some(tokenizer.tokenize_state.marker)
+ || matches!(tokenizer.current, None | Some(b'\n'))
+ {
+ tokenizer.exit(Name::Data);
+ State::Retry(StateName::TitleAtBreak)
+ } else {
+ let name = if tokenizer.current == Some(b'\\') {
+ StateName::TitleEscape
+ } else {
+ StateName::TitleInside
+ };
+ tokenizer.consume();
+ State::Next(name)
}
}