path: root/src/construct/partial_label.rs
author    Titus Wormer <tituswormer@gmail.com>  2022-07-29 18:22:59 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-07-29 18:22:59 +0200
commit    0eeff9148e327183e532752f46421a75506dd7a6 (patch)
tree      4f0aed04f90aa759ce96a2e87aa719e7fa95c450 /src/construct/partial_label.rs
parent    148ede7f0f42f0ccb1620b13d91f35d0c7d04c2f (diff)
download  markdown-rs-0eeff9148e327183e532752f46421a75506dd7a6.tar.gz
          markdown-rs-0eeff9148e327183e532752f46421a75506dd7a6.tar.bz2
          markdown-rs-0eeff9148e327183e532752f46421a75506dd7a6.zip
Refactor to improve states
* Remove custom kind wrappers, use plain bytes instead (see the sketch after this list)
* Remove `Into`s, use the explicit expected types instead
* Refactor to use `slice.as_str` in most places
* Remove unneeded unique check before adding a definition
* Use a shared CDATA prefix in constants
* Inline byte checks into matches
* Pass bytes back from parser instead of whole parse state
* Refactor to work more often on bytes
* Rename custom `size` to `len`
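The first bullet, together with "Inline byte checks into matches", is the pattern most visible in this file. A minimal before/after sketch, assuming a hypothetical `MarkerKind` wrapper rather than any specific removed type:

```rust
// Hypothetical "before": a wrapper enum around the marker byte.
// (`MarkerKind` is an illustrative stand-in, not the removed code itself.)
#[derive(Debug, PartialEq)]
enum MarkerKind {
    /// `*`
    Asterisk,
    /// `_`
    Underscore,
}

impl MarkerKind {
    fn from_byte(byte: u8) -> Option<MarkerKind> {
        match byte {
            b'*' => Some(MarkerKind::Asterisk),
            b'_' => Some(MarkerKind::Underscore),
            _ => None,
        }
    }
}

// "After": no wrapper type, the byte check is inlined with `matches!`,
// which is the same shape as the new `at_break` guard in this diff.
fn is_marker(byte: u8) -> bool {
    matches!(byte, b'*' | b'_')
}

fn main() {
    assert_eq!(MarkerKind::from_byte(b'*'), Some(MarkerKind::Asterisk));
    assert!(is_marker(b'*') && !is_marker(b'a'));
}
```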
Diffstat (limited to 'src/construct/partial_label.rs')
-rw-r--r--  src/construct/partial_label.rs  101
1 file changed, 47 insertions, 54 deletions
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index 7e40a2d..6fdb70d 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -123,39 +123,43 @@ pub fn start(tokenizer: &mut Tokenizer, options: Options) -> State {
/// ^
/// ```
fn at_break(tokenizer: &mut Tokenizer, mut info: Info) -> State {
- match tokenizer.current {
- None | Some(b'[') => State::Nok,
- Some(b']') if !info.data => State::Nok,
- _ if info.size > LINK_REFERENCE_SIZE_MAX => State::Nok,
- Some(b']') => {
- tokenizer.exit(info.options.string.clone());
- tokenizer.enter(info.options.marker.clone());
- tokenizer.consume();
- tokenizer.exit(info.options.marker.clone());
- tokenizer.exit(info.options.label);
- State::Ok
- }
- Some(b'\n') => tokenizer.go(
- space_or_tab_eol_with_options(EolOptions {
- content_type: Some(ContentType::String),
- connect: info.connect,
- }),
- |t| {
- info.connect = true;
- at_break(t, info)
- },
- )(tokenizer),
- _ => {
- tokenizer.enter_with_content(Token::Data, Some(ContentType::String));
-
- if info.connect {
- let index = tokenizer.events.len() - 1;
- link(&mut tokenizer.events, index);
- } else {
- info.connect = true;
+ if info.size > LINK_REFERENCE_SIZE_MAX
+ || matches!(tokenizer.current, None | Some(b'['))
+ || (matches!(tokenizer.current, Some(b']')) && !info.data)
+ {
+ State::Nok
+ } else {
+ match tokenizer.current {
+ Some(b'\n') => tokenizer.go(
+ space_or_tab_eol_with_options(EolOptions {
+ content_type: Some(ContentType::String),
+ connect: info.connect,
+ }),
+ |t| {
+ info.connect = true;
+ at_break(t, info)
+ },
+ )(tokenizer),
+ Some(b']') => {
+ tokenizer.exit(info.options.string.clone());
+ tokenizer.enter(info.options.marker.clone());
+ tokenizer.consume();
+ tokenizer.exit(info.options.marker.clone());
+ tokenizer.exit(info.options.label);
+ State::Ok
}
+ _ => {
+ tokenizer.enter_with_content(Token::Data, Some(ContentType::String));
+
+ if info.connect {
+ let index = tokenizer.events.len() - 1;
+ link(&mut tokenizer.events, index);
+ } else {
+ info.connect = true;
+ }
- label(tokenizer, info)
+ label(tokenizer, info)
+ }
}
}
}
@@ -172,30 +176,19 @@ fn label(tokenizer: &mut Tokenizer, mut info: Info) -> State {
tokenizer.exit(Token::Data);
at_break(tokenizer, info)
}
- _ if info.size > LINK_REFERENCE_SIZE_MAX => {
- tokenizer.exit(Token::Data);
- at_break(tokenizer, info)
- }
- Some(b'\t' | b' ') => {
- tokenizer.consume();
- info.size += 1;
- State::Fn(Box::new(|t| label(t, info)))
- }
- Some(b'\\') => {
- tokenizer.consume();
- info.size += 1;
- if !info.data {
- info.data = true;
- }
- State::Fn(Box::new(|t| escape(t, info)))
- }
- Some(_) => {
- tokenizer.consume();
- info.size += 1;
- if !info.data {
- info.data = true;
+ Some(byte) => {
+ if info.size > LINK_REFERENCE_SIZE_MAX {
+ tokenizer.exit(Token::Data);
+ at_break(tokenizer, info)
+ } else {
+ let func = if matches!(byte, b'\\') { escape } else { label };
+ tokenizer.consume();
+ info.size += 1;
+ if !info.data && !matches!(byte, b'\t' | b' ') {
+ info.data = true;
+ }
+ State::Fn(Box::new(move |t| func(t, info)))
}
- State::Fn(Box::new(|t| label(t, info)))
}
}
}
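For context on the `State::Fn(Box::new(move |t| func(t, info)))` line in the new `label` arm, here is a self-contained sketch of that boxed-closure state pattern. The `Tokenizer`, `Info`, and `State` below are deliberately simplified stand-ins, not the real markdown-rs types:

```rust
struct Tokenizer {
    current: Option<u8>,
}

struct Info {
    size: usize,
    data: bool,
}

enum State {
    Ok,
    Nok,
    /// Next state, captured as a closure that owns its `Info`.
    Fn(Box<dyn FnOnce(&mut Tokenizer) -> State>),
}

fn escape(_tokenizer: &mut Tokenizer, _info: Info) -> State {
    // Stub: the real escape state would consume the escaped byte.
    State::Ok
}

/// Shape of the refactored `label` arm: pick the next state function from
/// the current byte, update `info`, then box a closure that carries it on.
fn label(tokenizer: &mut Tokenizer, mut info: Info) -> State {
    match tokenizer.current {
        None => State::Nok,
        Some(byte) => {
            // A backslash hands off to `escape`; anything else loops on `label`.
            let func = if byte == b'\\' { escape } else { label };
            info.size += 1;
            // Whitespace alone does not make the label "have data".
            if !info.data && !matches!(byte, b'\t' | b' ') {
                info.data = true;
            }
            State::Fn(Box::new(move |t| func(t, info)))
        }
    }
}

fn main() {
    let mut tokenizer = Tokenizer { current: Some(b'a') };
    let info = Info { size: 0, data: false };
    // One step: `label` returns the continuation as a boxed closure.
    let state = label(&mut tokenizer, info);
    // Feed "end of input" to the continuation; it resolves to `Nok`.
    tokenizer.current = None;
    if let State::Fn(next) = state {
        assert!(matches!(next(&mut tokenizer), State::Nok));
    }
}
```

Dispatching through a plain `fn` value (`func`) is what lets the tab/space, backslash, and default arms of the old `label` collapse into the single `Some(byte)` arm above.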