path: root/src/content/document.rs
author    Titus Wormer <tituswormer@gmail.com>  2022-08-11 17:00:07 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-08-11 17:00:07 +0200
commit    90969231bfcdfcd09bae646abba17d832b633376 (patch)
tree      b85321bc25c1888a3ee7adc7420ed3ce8f188cb0 /src/content/document.rs
parent    a4dc19af6e82757af87c6658d7b1771a9694b83d (diff)
Refactor to handle definitions when parsing
Diffstat
-rw-r--r--  src/content/document.rs  37
1 file changed, 7 insertions, 30 deletions
diff --git a/src/content/document.rs b/src/content/document.rs
index b990ba5..f2890f3 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -13,11 +13,7 @@ use crate::parser::ParseState;
use crate::state::{Name as StateName, State};
use crate::subtokenize::{divide_events, subtokenize};
use crate::tokenizer::{Container, ContainerState, Tokenizer};
-use crate::util::{
- normalize_identifier::normalize_identifier,
- skip,
- slice::{Position, Slice},
-};
+use crate::util::skip;
/// Phases where we can exit containers.
#[derive(Debug, PartialEq)]
@@ -61,33 +57,9 @@ pub fn document(parse_state: &mut ParseState, point: Point) -> Vec<Event> {
);
tokenizer.flush(state, true);
- let mut index = 0;
- let mut definitions = vec![];
-
- while index < tokenizer.events.len() {
- let event = &tokenizer.events[index];
-
- if event.kind == Kind::Exit && event.name == Name::DefinitionLabelString {
- // Note: we don’t care about virtual spaces, so `as_str` is fine.
- let id = normalize_identifier(
- Slice::from_position(
- tokenizer.parse_state.bytes,
- &Position::from_exit_event(&tokenizer.events, index),
- )
- .as_str(),
- );
-
- if !definitions.contains(&id) {
- definitions.push(id);
- }
- }
-
- index += 1;
- }
-
let mut events = tokenizer.events;
- parse_state.definitions = definitions;
+ parse_state.definitions = tokenizer.tokenize_state.definitions;
while !subtokenize(&mut events, parse_state) {}
@@ -531,4 +503,9 @@ fn resolve(tokenizer: &mut Tokenizer) {
tokenizer
.resolvers
.append(&mut child.resolvers.split_off(0));
+
+ tokenizer
+ .tokenize_state
+ .definitions
+ .append(&mut child.tokenize_state.definitions.split_off(0));
}
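
For context, here is a minimal sketch of the approach this commit moves to: each tokenizer records normalized definition identifiers in its shared tokenize state as soon as the labels are parsed, and a child tokenizer's list is folded into its parent's when the child is resolved, instead of re-scanning the finished event list. The types and helpers below are simplified stand-ins for illustration only, not the crate's actual internals.

// A minimal, self-contained sketch of collecting definitions while parsing.
// `TokenizeState` and `normalize_identifier` here are simplified stand-ins.

/// Shared state a tokenizer carries while parsing.
#[derive(Default)]
struct TokenizeState {
    /// Normalized identifiers of definitions seen so far.
    definitions: Vec<String>,
}

/// Collapse whitespace and lowercase so `[A  b]` and `[a b]` match.
/// (The real crate has a dedicated `normalize_identifier` utility.)
fn normalize_identifier(label: &str) -> String {
    label
        .split_whitespace()
        .collect::<Vec<_>>()
        .join(" ")
        .to_lowercase()
}

/// Record a definition the moment its label is parsed, instead of
/// scanning the finished event list afterwards.
fn on_definition_label(state: &mut TokenizeState, label: &str) {
    let id = normalize_identifier(label);
    if !state.definitions.contains(&id) {
        state.definitions.push(id);
    }
}

/// When a child (container) tokenizer is resolved, fold its definitions
/// into the parent, mirroring the second hunk of this diff.
fn merge_child(parent: &mut TokenizeState, child: &mut TokenizeState) {
    parent.definitions.append(&mut child.definitions);
}

fn main() {
    let mut parent = TokenizeState::default();
    let mut child = TokenizeState::default();
    on_definition_label(&mut child, "My  Label");
    on_definition_label(&mut child, "my label"); // duplicate, skipped
    merge_child(&mut parent, &mut child);
    assert_eq!(parent.definitions, vec!["my label".to_string()]);
}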