author    Titus Wormer <tituswormer@gmail.com>  2022-07-22 18:46:33 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-07-22 18:57:19 +0200
commit    bac358ee5c341729e50630f2569a69b4d580ce47 (patch)
tree      9ea5f311dcad46e54dfaa55a2985c75925ff6c83  /src/construct/heading_atx.rs
parent    0525454e33ed6bcd7b43da1c0969c1d592e743d9 (diff)
Refactor to use a single shared edit map
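
For context, a minimal sketch of the shape this change moves to: the tokenizer owns one shared edit map, and a resolver mutates `tokenizer.map` directly instead of receiving its own `&mut EditMap` and returning a bool. The types below are simplified stand-ins for illustration only, not the real markdown-rs `Tokenizer`/`EditMap` definitions.

// Simplified stand-ins; only the shape of the refactor is shown here.

/// One queued edit: at `index`, remove `remove` events, insert `insert`.
struct Edit {
    index: usize,
    remove: usize,
    insert: Vec<String>,
}

/// Shared edit map: edits are queued while resolvers run.
#[derive(Default)]
struct EditMap {
    edits: Vec<Edit>,
}

impl EditMap {
    fn add(&mut self, index: usize, remove: usize, insert: Vec<String>) {
        self.edits.push(Edit { index, remove, insert });
    }
}

/// The tokenizer now owns the single shared map.
#[derive(Default)]
struct Tokenizer {
    events: Vec<String>,
    map: EditMap,
}

/// New-style resolver: edits `tokenizer.map` and returns nothing,
/// instead of taking `map: &mut EditMap` and returning a bool.
fn resolve(tokenizer: &mut Tokenizer) {
    // Queue an insertion before the first event (illustrative only).
    tokenizer.map.add(0, 0, vec!["heading-text-enter".into()]);
}

fn main() {
    let mut tokenizer = Tokenizer::default();
    tokenizer.events.push("heading-enter".into());
    resolve(&mut tokenizer);
    assert_eq!(tokenizer.map.edits.len(), 1);
}

With one map shared through the tokenizer, every resolver queues its edits into the same place, rather than each resolver carrying its own map and reporting whether it is needed by others.
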
Diffstat (limited to 'src/construct/heading_atx.rs')
-rw-r--r--  src/construct/heading_atx.rs  12
1 file changed, 4 insertions, 8 deletions
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index 52eca54..1eabb56 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -58,7 +58,6 @@ use super::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
 use crate::constant::{HEADING_ATX_OPENING_FENCE_SIZE_MAX, TAB_SIZE};
 use crate::token::Token;
 use crate::tokenizer::{Code, ContentType, Event, EventType, State, Tokenizer};
-use crate::util::edit_map::EditMap;
 
 /// Start of a heading (atx).
 ///
@@ -190,7 +189,7 @@ fn data(tokenizer: &mut Tokenizer, code: Code) -> State {
 }
 
 /// Resolve heading (atx).
-pub fn resolve(tokenizer: &mut Tokenizer, map: &mut EditMap) -> bool {
+pub fn resolve(tokenizer: &mut Tokenizer) {
     let mut index = 0;
     let mut heading_start: Option<usize> = None;
     let mut data_start: Option<usize> = None;
@@ -206,7 +205,7 @@ pub fn resolve(tokenizer: &mut Tokenizer, map: &mut EditMap) -> bool {
                 // If `start` is some, `end` is too.
                 let end = data_end.unwrap();
 
-                map.add(
+                tokenizer.map.add(
                     start,
                     0,
                     vec![Event {
@@ -218,9 +217,9 @@ pub fn resolve(tokenizer: &mut Tokenizer, map: &mut EditMap) -> bool {
                 );
 
                 // Remove everything between the start and the end.
-                map.add(start + 1, end - start - 1, vec![]);
+                tokenizer.map.add(start + 1, end - start - 1, vec![]);
 
-                map.add(
+                tokenizer.map.add(
                     end + 1,
                     0,
                     vec![Event {
@@ -247,7 +246,4 @@ pub fn resolve(tokenizer: &mut Tokenizer, map: &mut EditMap) -> bool {
 
         index += 1;
     }
-
-    // This resolver improves events, but is not needed by other resolvers.
-    false
 }