about | summary | refs | log | tree | commit | diff | stats
path: root/src/construct
diff options
context:
space:
mode:
author: Titus Wormer <tituswormer@gmail.com> 2022-07-19 17:56:57 +0200
committer: Titus Wormer <tituswormer@gmail.com> 2022-07-19 17:56:57 +0200
commit0f20660cb95abd4f407bdafa2c45e01829fa971f (patch)
tree865875c20cee2e74f95e54f52aa73105e90067e7 /src/construct
parentae0f12e668cfd37728aad907c813431595e6cc1b (diff)
download: markdown-rs-0f20660cb95abd4f407bdafa2c45e01829fa971f.tar.gz
          markdown-rs-0f20660cb95abd4f407bdafa2c45e01829fa971f.tar.bz2
          markdown-rs-0f20660cb95abd4f407bdafa2c45e01829fa971f.zip
Refactor to remove cloning in `edit_map`
Diffstat (limited to '')
-rw-r--r--src/construct/attention.rs2
-rw-r--r--src/construct/heading_atx.rs2
-rw-r--r--src/construct/heading_setext.rs2
-rw-r--r--src/construct/label_end.rs17
-rw-r--r--src/construct/list.rs8
-rw-r--r--src/construct/paragraph.rs2
-rw-r--r--src/construct/partial_data.rs2
7 files changed, 15 insertions, 20 deletions
diff --git a/src/construct/attention.rs b/src/construct/attention.rs
index 3e15f9a..7e99600 100644
--- a/src/construct/attention.rs
+++ b/src/construct/attention.rs
@@ -523,7 +523,7 @@ fn resolve_attention(tokenizer: &mut Tokenizer) -> Vec<Event> {
index += 1;
}
- edit_map.consume(&mut tokenizer.events)
+ edit_map.consume(tokenizer.events.split_off(0))
}
/// Classify whether a character code represents whitespace, punctuation, or
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index 68a6be7..4546924 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -258,5 +258,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Vec<Event> {
index += 1;
}
- edit_map.consume(&mut tokenizer.events)
+ edit_map.consume(tokenizer.events.split_off(0))
}
diff --git a/src/construct/heading_setext.rs b/src/construct/heading_setext.rs
index 7cd259b..841bf53 100644
--- a/src/construct/heading_setext.rs
+++ b/src/construct/heading_setext.rs
@@ -236,5 +236,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Vec<Event> {
index += 1;
}
- edit_map.consume(&mut tokenizer.events)
+ edit_map.consume(tokenizer.events.split_off(0))
}
diff --git a/src/construct/label_end.rs b/src/construct/label_end.rs
index 2ac2500..2124681 100644
--- a/src/construct/label_end.rs
+++ b/src/construct/label_end.rs
@@ -154,9 +154,7 @@ use crate::construct::{
partial_title::{start as title, Options as TitleOptions},
};
use crate::token::Token;
-use crate::tokenizer::{
- Code, Event, EventType, LabelStart, Media, State, StateFnResult, Tokenizer,
-};
+use crate::tokenizer::{Code, Event, EventType, Media, State, StateFnResult, Tokenizer};
use crate::util::{
edit_map::EditMap,
normalize_identifier::normalize_identifier,
@@ -325,10 +323,9 @@ fn reference_not_full(tokenizer: &mut Tokenizer, code: Code, info: Info) -> Stat
/// ```
fn ok(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> StateFnResult {
// Remove this one and everything after it.
- let mut left: Vec<LabelStart> = tokenizer
+ let mut left = tokenizer
.label_start_stack
- .drain(info.label_start_index..)
- .collect();
+ .split_off(info.label_start_index);
// Remove this one from `left`, as we’ll move it to `media_list`.
left.remove(0);
tokenizer.label_start_list_loose.append(&mut left);
@@ -616,9 +613,9 @@ fn collapsed_reference_open(tokenizer: &mut Tokenizer, code: Code) -> StateFnRes
/// images, or turns them back into data.
#[allow(clippy::too_many_lines)]
pub fn resolve_media(tokenizer: &mut Tokenizer) -> Vec<Event> {
- let mut left: Vec<LabelStart> = tokenizer.label_start_list_loose.drain(..).collect();
- let mut left_2: Vec<LabelStart> = tokenizer.label_start_stack.drain(..).collect();
- let media: Vec<Media> = tokenizer.media_list.drain(..).collect();
+ let mut left = tokenizer.label_start_list_loose.split_off(0);
+ let mut left_2 = tokenizer.label_start_stack.split_off(0);
+ let media = tokenizer.media_list.split_off(0);
left.append(&mut left_2);
let mut edit_map = EditMap::new();
@@ -776,5 +773,5 @@ pub fn resolve_media(tokenizer: &mut Tokenizer) -> Vec<Event> {
index += 1;
}
- edit_map.consume(&mut tokenizer.events)
+ edit_map.consume(tokenizer.events.split_off(0))
}
diff --git a/src/construct/list.rs b/src/construct/list.rs
index 9cd3b62..db8af36 100644
--- a/src/construct/list.rs
+++ b/src/construct/list.rs
@@ -432,8 +432,7 @@ pub fn resolve_list_item(tokenizer: &mut Tokenizer) -> Vec<Event> {
if previous.0 == current.0 && previous.1 == current.1 && before == current.2 {
let previous_mut = &mut lists_wip[list_index];
previous_mut.3 = current.3;
- let mut remainder = lists_wip.drain((list_index + 1)..).collect::<Vec<_>>();
- lists.append(&mut remainder);
+ lists.append(&mut lists_wip.split_off(list_index + 1));
matched = true;
break;
}
@@ -457,8 +456,7 @@ pub fn resolve_list_item(tokenizer: &mut Tokenizer) -> Vec<Event> {
}
if let Some(exit) = exit {
- let mut remainder = lists_wip.drain(exit..).collect::<Vec<_>>();
- lists.append(&mut remainder);
+ lists.append(&mut lists_wip.split_off(exit));
}
lists_wip.push(current);
@@ -494,5 +492,5 @@ pub fn resolve_list_item(tokenizer: &mut Tokenizer) -> Vec<Event> {
index += 1;
}
- edit_map.consume(&mut tokenizer.events)
+ edit_map.consume(tokenizer.events.split_off(0))
}
diff --git a/src/construct/paragraph.rs b/src/construct/paragraph.rs
index 74dca87..53030f4 100644
--- a/src/construct/paragraph.rs
+++ b/src/construct/paragraph.rs
@@ -142,5 +142,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Vec<Event> {
index += 1;
}
- edit_map.consume(&mut tokenizer.events)
+ edit_map.consume(tokenizer.events.split_off(0))
}
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index 3bc8c1c..b59bb76 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -114,5 +114,5 @@ pub fn resolve_data(tokenizer: &mut Tokenizer) -> Vec<Event> {
index += 1;
}
- edit_map.consume(&mut tokenizer.events)
+ edit_map.consume(tokenizer.events.split_off(0))
}