From 0f20660cb95abd4f407bdafa2c45e01829fa971f Mon Sep 17 00:00:00 2001
From: Titus Wormer <tituswormer@gmail.com>
Date: Tue, 19 Jul 2022 17:56:57 +0200
Subject: Refactor to remove cloning in `edit_map`

---
 src/construct/attention.rs      |  2 +-
 src/construct/heading_atx.rs    |  2 +-
 src/construct/heading_setext.rs |  2 +-
 src/construct/label_end.rs      | 17 +++++++----------
 src/construct/list.rs           |  8 +++-----
 src/construct/paragraph.rs      |  2 +-
 src/construct/partial_data.rs   |  2 +-
 7 files changed, 15 insertions(+), 20 deletions(-)

(limited to 'src/construct')

diff --git a/src/construct/attention.rs b/src/construct/attention.rs
index 3e15f9a..7e99600 100644
--- a/src/construct/attention.rs
+++ b/src/construct/attention.rs
@@ -523,7 +523,7 @@ fn resolve_attention(tokenizer: &mut Tokenizer) -> Vec<Event> {
         index += 1;
     }
 
-    edit_map.consume(&mut tokenizer.events)
+    edit_map.consume(tokenizer.events.split_off(0))
 }
 
 /// Classify whether a character code represents whitespace, punctuation, or
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index 68a6be7..4546924 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -258,5 +258,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Vec<Event> {
         index += 1;
     }
 
-    edit_map.consume(&mut tokenizer.events)
+    edit_map.consume(tokenizer.events.split_off(0))
 }
diff --git a/src/construct/heading_setext.rs b/src/construct/heading_setext.rs
index 7cd259b..841bf53 100644
--- a/src/construct/heading_setext.rs
+++ b/src/construct/heading_setext.rs
@@ -236,5 +236,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Vec<Event> {
         index += 1;
     }
 
-    edit_map.consume(&mut tokenizer.events)
+    edit_map.consume(tokenizer.events.split_off(0))
 }
diff --git a/src/construct/label_end.rs b/src/construct/label_end.rs
index 2ac2500..2124681 100644
--- a/src/construct/label_end.rs
+++ b/src/construct/label_end.rs
@@ -154,9 +154,7 @@ use crate::construct::{
     partial_title::{start as title, Options as TitleOptions},
 };
 use crate::token::Token;
-use crate::tokenizer::{
-    Code, Event, EventType, LabelStart, Media, State, StateFnResult, Tokenizer,
-};
+use crate::tokenizer::{Code, Event, EventType, Media, State, StateFnResult, Tokenizer};
 use crate::util::{
     edit_map::EditMap,
     normalize_identifier::normalize_identifier,
@@ -325,10 +323,9 @@ fn reference_not_full(tokenizer: &mut Tokenizer, code: Code, info: Info) -> Stat
 /// ```
 fn ok(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> StateFnResult {
     // Remove this one and everything after it.
-    let mut left: Vec<LabelStart> = tokenizer
+    let mut left = tokenizer
         .label_start_stack
-        .drain(info.label_start_index..)
-        .collect();
+        .split_off(info.label_start_index);
     // Remove this one from `left`, as we’ll move it to `media_list`.
     left.remove(0);
     tokenizer.label_start_list_loose.append(&mut left);
@@ -616,9 +613,9 @@ fn collapsed_reference_open(tokenizer: &mut Tokenizer, code: Code) -> StateFnRes
 /// images, or turns them back into data.
 #[allow(clippy::too_many_lines)]
 pub fn resolve_media(tokenizer: &mut Tokenizer) -> Vec<Event> {
-    let mut left: Vec<LabelStart> = tokenizer.label_start_list_loose.drain(..).collect();
-    let mut left_2: Vec<LabelStart> = tokenizer.label_start_stack.drain(..).collect();
-    let media: Vec<Media> = tokenizer.media_list.drain(..).collect();
+    let mut left = tokenizer.label_start_list_loose.split_off(0);
+    let mut left_2 = tokenizer.label_start_stack.split_off(0);
+    let media = tokenizer.media_list.split_off(0);
     left.append(&mut left_2);
 
     let mut edit_map = EditMap::new();
@@ -776,5 +773,5 @@ pub fn resolve_media(tokenizer: &mut Tokenizer) -> Vec<Event> {
         index += 1;
     }
 
-    edit_map.consume(&mut tokenizer.events)
+    edit_map.consume(tokenizer.events.split_off(0))
 }
diff --git a/src/construct/list.rs b/src/construct/list.rs
index 9cd3b62..db8af36 100644
--- a/src/construct/list.rs
+++ b/src/construct/list.rs
@@ -432,8 +432,7 @@ pub fn resolve_list_item(tokenizer: &mut Tokenizer) -> Vec<Event> {
                 if previous.0 == current.0 && previous.1 == current.1 && before == current.2 {
                     let previous_mut = &mut lists_wip[list_index];
                     previous_mut.3 = current.3;
-                    let mut remainder = lists_wip.drain((list_index + 1)..).collect::<Vec<_>>();
-                    lists.append(&mut remainder);
+                    lists.append(&mut lists_wip.split_off(list_index + 1));
                     matched = true;
                     break;
                 }
@@ -457,8 +456,7 @@ pub fn resolve_list_item(tokenizer: &mut Tokenizer) -> Vec<Event> {
             }
 
             if let Some(exit) = exit {
-                let mut remainder = lists_wip.drain(exit..).collect::<Vec<_>>();
-                lists.append(&mut remainder);
+                lists.append(&mut lists_wip.split_off(exit));
             }
 
             lists_wip.push(current);
@@ -494,5 +492,5 @@ pub fn resolve_list_item(tokenizer: &mut Tokenizer) -> Vec<Event> {
         index += 1;
     }
 
-    edit_map.consume(&mut tokenizer.events)
+    edit_map.consume(tokenizer.events.split_off(0))
 }
diff --git a/src/construct/paragraph.rs b/src/construct/paragraph.rs
index 74dca87..53030f4 100644
--- a/src/construct/paragraph.rs
+++ b/src/construct/paragraph.rs
@@ -142,5 +142,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Vec<Event> {
         index += 1;
     }
 
-    edit_map.consume(&mut tokenizer.events)
+    edit_map.consume(tokenizer.events.split_off(0))
 }
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index 3bc8c1c..b59bb76 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -114,5 +114,5 @@ pub fn resolve_data(tokenizer: &mut Tokenizer) -> Vec<Event> {
         index += 1;
     }
 
-    edit_map.consume(&mut tokenizer.events)
+    edit_map.consume(tokenizer.events.split_off(0))
 }
-- 
cgit
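A note on the pattern, from the editor rather than the patch author: each resolver used to hand `&mut tokenizer.events` to `EditMap::consume`, which (per the commit subject) involved cloning inside `edit_map`; passing `tokenizer.events.split_off(0)` instead gives `consume` ownership of the whole event buffer. The sketch below is a minimal, self-contained illustration of the two `split_off` idioms the patch uses (toy data and names, nothing from the repository). In the standard library's implementation, `split_off(0)` is special-cased to swap buffers rather than move elements one by one, and `split_off(index)` replaces the `drain(index..).collect()` dance without needing a turbofish.

```rust
// Toy demonstration of `Vec::split_off` as a replacement for
// `drain(..).collect()`; all names here are illustrative only.
fn main() {
    // Taking everything: `split_off(0)` moves the whole buffer out,
    // leaving `events` empty and skipping the fresh allocation that
    // collecting a drain would perform.
    let mut events = vec!["enter:paragraph", "data", "exit:paragraph"];
    let taken = events.split_off(0); // was: events.drain(..).collect::<Vec<_>>()
    assert!(events.is_empty());
    assert_eq!(taken.len(), 3);

    // Taking a tail: `split_off(index)` splits at an index, as in
    // `resolve_list_item`, where finished lists move over in one call.
    let mut lists_wip = vec![1, 2, 3, 4];
    let mut lists: Vec<i32> = Vec::new();
    lists.append(&mut lists_wip.split_off(2)); // tail [3, 4] moves across
    assert_eq!(lists_wip, [1, 2]);
    assert_eq!(lists, [3, 4]);
}
```

The same ownership transfer explains why the `LabelStart` import and the `Vec<LabelStart>`/`Vec<Media>` annotations disappear: `split_off` returns a vector of the source's element type, so the compiler infers what `collect()` needed spelled out.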