author     Titus Wormer <tituswormer@gmail.com>     2022-08-12 17:12:03 +0200
committer  Titus Wormer <tituswormer@gmail.com>     2022-08-12 17:12:03 +0200
commit     1234de9c22343fc4e1fe9e3e1127d2db01e96c2f (patch)
tree       85d5115fbe549246d2cf798853f5600ee0e8d9b2 /src
parent     037fb65633edaad1a75a707eee7c223471d7fb00 (diff)
Refactor to improve some names
Diffstat (limited to '')
-rw-r--r--  src/construct/label_end.rs           | 26
-rw-r--r--  src/construct/label_start_image.rs   |  2
-rw-r--r--  src/construct/label_start_link.rs    |  2
-rw-r--r--  src/tokenizer.rs                     | 24
4 files changed, 27 insertions, 27 deletions
diff --git a/src/construct/label_end.rs b/src/construct/label_end.rs
index 8801ea7..7f80415 100644
--- a/src/construct/label_end.rs
+++ b/src/construct/label_end.rs
@@ -151,7 +151,7 @@ use crate::construct::partial_space_or_tab_eol::space_or_tab_eol;
 use crate::event::{Event, Kind, Name};
 use crate::resolve::Name as ResolveName;
 use crate::state::{Name as StateName, State};
-use crate::tokenizer::{Media, Tokenizer};
+use crate::tokenizer::{Label, Tokenizer};
 use crate::util::{
     normalize_identifier::normalize_identifier,
     skip,
@@ -172,8 +172,8 @@ use crate::util::{
 pub fn start(tokenizer: &mut Tokenizer) -> State {
     if Some(b']') == tokenizer.current && tokenizer.parse_state.constructs.label_end {
         // If there is an okay opening:
-        if !tokenizer.tokenize_state.label_start_stack.is_empty() {
-            let label_start = tokenizer.tokenize_state.label_start_stack.last().unwrap();
+        if !tokenizer.tokenize_state.label_starts.is_empty() {
+            let label_start = tokenizer.tokenize_state.label_starts.last().unwrap();
 
             tokenizer.tokenize_state.end = tokenizer.events.len();
 
@@ -207,7 +207,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
 /// ^
 /// ```
 pub fn after(tokenizer: &mut Tokenizer) -> State {
-    let start = tokenizer.tokenize_state.label_start_stack.last().unwrap();
+    let start = tokenizer.tokenize_state.label_starts.last().unwrap();
     let defined = tokenizer
         .parse_state
         .definitions
@@ -287,7 +287,7 @@ pub fn reference_not_full(tokenizer: &mut Tokenizer) -> State {
 /// ```
 pub fn ok(tokenizer: &mut Tokenizer) -> State {
     // Remove the start.
-    let label_start = tokenizer.tokenize_state.label_start_stack.pop().unwrap();
+    let label_start = tokenizer.tokenize_state.label_starts.pop().unwrap();
 
     let is_link = tokenizer.events[label_start.start.0].name == Name::LabelLink;
 
@@ -297,8 +297,8 @@ pub fn ok(tokenizer: &mut Tokenizer) -> State {
     // we can’t remove them.
     if is_link {
         let mut index = 0;
-        while index < tokenizer.tokenize_state.label_start_stack.len() {
-            let label_start = &mut tokenizer.tokenize_state.label_start_stack[index];
+        while index < tokenizer.tokenize_state.label_starts.len() {
+            let label_start = &mut tokenizer.tokenize_state.label_starts[index];
             if tokenizer.events[label_start.start.0].name == Name::LabelLink {
                 label_start.inactive = true;
             }
@@ -306,7 +306,7 @@ pub fn ok(tokenizer: &mut Tokenizer) -> State {
         }
     }
 
-    tokenizer.tokenize_state.media_list.push(Media {
+    tokenizer.tokenize_state.labels.push(Label {
         start: label_start.start,
         end: (tokenizer.tokenize_state.end, tokenizer.events.len() - 1),
     });
@@ -328,9 +328,9 @@ pub fn ok(tokenizer: &mut Tokenizer) -> State {
 /// ^
 /// ```
 pub fn nok(tokenizer: &mut Tokenizer) -> State {
-    let start = tokenizer.tokenize_state.label_start_stack.pop().unwrap();
+    let start = tokenizer.tokenize_state.label_starts.pop().unwrap();
 
-    tokenizer.tokenize_state.label_start_list_loose.push(start);
+    tokenizer.tokenize_state.label_starts_loose.push(start);
 
     tokenizer.tokenize_state.end = 0;
     State::Nok
@@ -594,9 +594,9 @@ pub fn reference_collapsed_open(tokenizer: &mut Tokenizer) -> State {
 /// images, and turns unmatched label starts back into data.
 #[allow(clippy::too_many_lines)]
 pub fn resolve(tokenizer: &mut Tokenizer) {
-    let mut left = tokenizer.tokenize_state.label_start_list_loose.split_off(0);
-    let mut left_2 = tokenizer.tokenize_state.label_start_stack.split_off(0);
-    let media = tokenizer.tokenize_state.media_list.split_off(0);
+    let mut left = tokenizer.tokenize_state.label_starts_loose.split_off(0);
+    let mut left_2 = tokenizer.tokenize_state.label_starts.split_off(0);
+    let media = tokenizer.tokenize_state.labels.split_off(0);
     left.append(&mut left_2);
 
     let events = &tokenizer.events;
diff --git a/src/construct/label_start_image.rs b/src/construct/label_start_image.rs
index b2890e6..e6a75d3 100644
--- a/src/construct/label_start_image.rs
+++ b/src/construct/label_start_image.rs
@@ -64,7 +64,7 @@ pub fn open(tokenizer: &mut Tokenizer) -> State {
         tokenizer.consume();
         tokenizer.exit(Name::LabelMarker);
         tokenizer.exit(Name::LabelImage);
-        tokenizer.tokenize_state.label_start_stack.push(LabelStart {
+        tokenizer.tokenize_state.label_starts.push(LabelStart {
             start: (tokenizer.events.len() - 6, tokenizer.events.len() - 1),
             inactive: false,
         });
diff --git a/src/construct/label_start_link.rs b/src/construct/label_start_link.rs
index 8089e80..7b6ac6e 100644
--- a/src/construct/label_start_link.rs
+++ b/src/construct/label_start_link.rs
@@ -46,7 +46,7 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
         tokenizer.consume();
         tokenizer.exit(Name::LabelMarker);
         tokenizer.exit(Name::LabelLink);
-        tokenizer.tokenize_state.label_start_stack.push(LabelStart {
+        tokenizer.tokenize_state.label_starts.push(LabelStart {
             start: (start, tokenizer.events.len() - 1),
             inactive: false,
         });
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 3f60b86..4a9fa01 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -64,12 +64,12 @@ pub struct LabelStart {
     pub inactive: bool,
 }
 
-/// Media we found.
+/// Valid label.
 #[derive(Debug)]
-pub struct Media {
-    /// Indices of where the media’s label start starts and ends in `events`.
+pub struct Label {
+    /// Indices of label start.
     pub start: (usize, usize),
-    /// Indices of where the media’s label end starts and ends in `events`.
+    /// Indices of label end.
     pub end: (usize, usize),
 }
 
@@ -152,18 +152,18 @@ pub struct TokenizeState<'a> {
     pub space_or_tab_token: Name,
 
     // Couple of media related fields.
-    /// Stack of label (start) that could form images and links.
+    /// List of usable label starts.
     ///
     /// Used when tokenizing [text content][crate::content::text].
-    pub label_start_stack: Vec<LabelStart>,
-    /// Stack of label (start) that cannot form images and links.
+    pub label_starts: Vec<LabelStart>,
+    /// List of unusable label starts.
     ///
     /// Used when tokenizing [text content][crate::content::text].
-    pub label_start_list_loose: Vec<LabelStart>,
+    pub label_starts_loose: Vec<LabelStart>,
     /// Stack of images and links.
     ///
    /// Used when tokenizing [text content][crate::content::text].
-    pub media_list: Vec<Media>,
+    pub labels: Vec<Label>,
 
     /// List of defined identifiers.
     pub definitions: Vec<String>,
@@ -279,12 +279,12 @@ impl<'a> Tokenizer<'a> {
                 document_child: None,
                 definitions: vec![],
                 end: 0,
-                label_start_stack: vec![],
-                label_start_list_loose: vec![],
+                label_starts: vec![],
+                label_starts_loose: vec![],
                 marker: 0,
                 marker_b: 0,
                 markers: &[],
-                media_list: vec![],
+                labels: vec![],
                 seen: false,
                 size: 0,
                 size_b: 0,
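
For quick reference, the renaming can be summarized as the following sketch, assembled only from the hunks above (all other fields, derives, and impls are omitted):

// Sketch of the renamed type after this commit (was `Media`):

/// Valid label.
#[derive(Debug)]
pub struct Label {
    /// Indices of label start.
    pub start: (usize, usize),
    /// Indices of label end.
    pub end: (usize, usize),
}

// Renamed fields on `TokenizeState`:
//   label_start_stack      -> label_starts        (usable label starts)
//   label_start_list_loose -> label_starts_loose  (unusable label starts)
//   media_list             -> labels              (matched links and images)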