path: root/src/construct/partial_label.rs
author Titus Wormer <tituswormer@gmail.com> 2022-07-28 16:48:00 +0200
committer Titus Wormer <tituswormer@gmail.com> 2022-07-28 16:48:00 +0200
commit f7e5fb852dc9c416b9eeb1f0d4f2d51ba5b68456 (patch)
tree c1ac3f22473bd79566d835b2474d2ae9e00d6c55 /src/construct/partial_label.rs
parent d729b07712ca9cc91e68af1776dac9d7008a90cb (diff)
Refactor to work on `char`s
Previously, a custom char implementation was used. That was easier to work with, as sometimes “virtual” characters are injected or characters are ignored. This replaces it with working on actual `char`s, in the hope of eventually working on `u8`s. It simplifies the state machine somewhat, as only `\n` is fed, regardless of whether the input contained a CRLF, CR, or LF, and `' '` is fed instead of virtual spaces. The BOM, if present, is now available as a `ByteOrderMark` event.
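Roughly, the change means states now match on a plain `Option<char>` instead of the old custom `Code` enum. Below is a minimal standalone sketch, using hypothetical names rather than the actual markdown-rs types, of what that buys:

```rust
// Hypothetical sketch: neither `Code` nor these helpers are the real
// markdown-rs API; they only illustrate the shape of the refactor.
#[allow(dead_code)]
enum Code {
    None,
    CarriageReturnLineFeed,
    VirtualSpace,
    Char(char),
}

// Before: every state spelled out the custom variants for line endings.
fn old_at_eol(code: Code) -> bool {
    matches!(
        code,
        Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r')
    )
}

// After: CRLF, CR, and LF are all fed as '\n' and virtual spaces as ' ',
// so a plain `Option<char>` is enough.
fn new_at_eol(current: Option<char>) -> bool {
    matches!(current, None | Some('\n'))
}

fn main() {
    assert!(old_at_eol(Code::CarriageReturnLineFeed));
    assert!(new_at_eol(Some('\n')));
    assert!(!new_at_eol(Some('[')));
}
```

Collapsing all line endings into `'\n'` before they are fed is what lets each end-of-line arm shrink to a single `Some('\n')` case, as seen in the `at_break` hunk in the diff below.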
Diffstat (limited to 'src/construct/partial_label.rs')
-rw-r--r--  src/construct/partial_label.rs  22
1 file changed, 11 insertions(+), 11 deletions(-)
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index 91a0e26..ee31533 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -62,7 +62,7 @@ use super::partial_space_or_tab::{space_or_tab_eol_with_options, EolOptions};
use crate::constant::LINK_REFERENCE_SIZE_MAX;
use crate::subtokenize::link;
use crate::token::Token;
-use crate::tokenizer::{Code, ContentType, State, Tokenizer};
+use crate::tokenizer::{ContentType, State, Tokenizer};
/// Configuration.
///
@@ -98,7 +98,7 @@ struct Info {
/// ```
pub fn start(tokenizer: &mut Tokenizer, options: Options) -> State {
match tokenizer.current {
- Code::Char('[') => {
+ Some('[') => {
let info = Info {
connect: false,
data: false,
@@ -124,10 +124,10 @@ pub fn start(tokenizer: &mut Tokenizer, options: Options) -> State {
/// ```
fn at_break(tokenizer: &mut Tokenizer, mut info: Info) -> State {
match tokenizer.current {
- Code::None | Code::Char('[') => State::Nok,
- Code::Char(']') if !info.data => State::Nok,
+ None | Some('[') => State::Nok,
+ Some(']') if !info.data => State::Nok,
_ if info.size > LINK_REFERENCE_SIZE_MAX => State::Nok,
- Code::Char(']') => {
+ Some(']') => {
tokenizer.exit(info.options.string.clone());
tokenizer.enter(info.options.marker.clone());
tokenizer.consume();
@@ -135,7 +135,7 @@ fn at_break(tokenizer: &mut Tokenizer, mut info: Info) -> State {
tokenizer.exit(info.options.label);
State::Ok
}
- Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => tokenizer.go(
+ Some('\n') => tokenizer.go(
space_or_tab_eol_with_options(EolOptions {
content_type: Some(ContentType::String),
connect: info.connect,
@@ -168,7 +168,7 @@ fn at_break(tokenizer: &mut Tokenizer, mut info: Info) -> State {
/// ```
fn label(tokenizer: &mut Tokenizer, mut info: Info) -> State {
match tokenizer.current {
- Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r' | '[' | ']') => {
+ None | Some('\n' | '[' | ']') => {
tokenizer.exit(Token::Data);
at_break(tokenizer, info)
}
@@ -176,12 +176,12 @@ fn label(tokenizer: &mut Tokenizer, mut info: Info) -> State {
tokenizer.exit(Token::Data);
at_break(tokenizer, info)
}
- Code::VirtualSpace | Code::Char('\t' | ' ') => {
+ Some('\t' | ' ') => {
tokenizer.consume();
info.size += 1;
State::Fn(Box::new(|t| label(t, info)))
}
- Code::Char('\\') => {
+ Some('\\') => {
tokenizer.consume();
info.size += 1;
if !info.data {
@@ -189,7 +189,7 @@ fn label(tokenizer: &mut Tokenizer, mut info: Info) -> State {
}
State::Fn(Box::new(|t| escape(t, info)))
}
- Code::Char(_) => {
+ Some(_) => {
tokenizer.consume();
info.size += 1;
if !info.data {
@@ -208,7 +208,7 @@ fn label(tokenizer: &mut Tokenizer, mut info: Info) -> State {
/// ```
fn escape(tokenizer: &mut Tokenizer, mut info: Info) -> State {
match tokenizer.current {
- Code::Char('[' | '\\' | ']') => {
+ Some('[' | '\\' | ']') => {
tokenizer.consume();
info.size += 1;
State::Fn(Box::new(|t| label(t, info)))