author    Titus Wormer <tituswormer@gmail.com>  2022-06-29 10:26:39 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-06-29 10:26:39 +0200
commit    7721f210c16e19b1c2af90f69130386b89bb5104 (patch)
tree      c47ff3d9c974ccd0c81a2c5b8ccbce9f7635975d /src/content
parent    7bb1008f508f61b51dd80086a91ada347be36c68 (diff)
Add support for sharing identifiers, references before definitions
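
The flow parser now makes a first pass over its own events, collects the
normalized label of every definition it encounters into a HashSet, and stores
that set on `ParseState` before `subtokenize` runs. Content tokenized
afterwards can then resolve a reference even when it occurs before its
definition in document order. A minimal sketch of the collection step, with a
simplified stand-in for the crate's `normalize_identifier` (the real one
follows CommonMark label normalization, so `to_lowercase` here is only an
approximation of Unicode case folding):

    use std::collections::HashSet;

    // Simplified stand-in for `normalize_identifier`: collapse internal
    // whitespace, then case-fold (approximated with `to_lowercase`).
    fn normalize_identifier(value: &str) -> String {
        value.split_whitespace().collect::<Vec<_>>().join(" ").to_lowercase()
    }

    fn main() {
        // First pass: gather every definition label the document declares.
        let labels = ["Alpha", "  bravo\t CHARLIE "];
        let mut next_definitions: HashSet<String> = HashSet::new();
        for label in labels {
            next_definitions.insert(normalize_identifier(label));
        }

        // A reference can now be checked against the full set, even if it
        // appeared earlier in the document than its definition.
        assert!(next_definitions.contains(&normalize_identifier("ALPHA")));
        assert!(next_definitions.contains(&normalize_identifier("Bravo Charlie")));
    }
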
Diffstat (limited to 'src/content')
-rw-r--r--  src/content/flow.rs  12
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/src/content/flow.rs b/src/content/flow.rs
index 546712f..0d3ede0 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -33,12 +33,13 @@ use crate::util::{
     normalize_identifier::normalize_identifier,
     span::{from_exit_event, serialize},
 };
+use std::collections::HashSet;
 
 /// Turn `codes` as the flow content type into events.
-pub fn flow(parse_state: &ParseState, point: Point, index: usize) -> Vec<Event> {
+pub fn flow(parse_state: &mut ParseState, point: Point, index: usize) -> Vec<Event> {
     let mut tokenizer = Tokenizer::new(point, index, parse_state);
-
     tokenizer.push(&parse_state.codes, Box::new(start), true);
+    let mut next_definitions: HashSet<String> = HashSet::new();
 
     let mut index = 0;
 
@@ -48,15 +49,14 @@ pub fn flow(parse_state: &ParseState, point: Point, index: usize) -> Vec<Event>
         if event.event_type == EventType::Exit
             && event.token_type == TokenType::DefinitionLabelString
         {
-            let id = normalize_identifier(
+            next_definitions.insert(normalize_identifier(
                 serialize(
                     &parse_state.codes,
                     &from_exit_event(&tokenizer.events, index),
                     false,
                 )
                 .as_str(),
-            );
-            println!("to do: use definition identifier {:?}", id);
+            ));
         }
 
         index += 1;
@@ -64,6 +64,8 @@ pub fn flow(parse_state: &ParseState, point: Point, index: usize) -> Vec<Event>
 
     let mut result = (tokenizer.events, false);
 
+    parse_state.definitions = next_definitions;
+
     while !result.1 {
         result = subtokenize(result.0, parse_state);
     }
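
Collecting into a local `next_definitions` and assigning it to
`parse_state.definitions` in one step after the event loop presumably keeps
the mutable borrow of `parse_state` clear of the tokenizer's earlier shared
borrow, and it replaces the previous set wholesale rather than accumulating
across calls. With the set shared, a reference that precedes its definition
should resolve to a link. A hypothetical driver, assuming the crate exposes a
`micromark` entry point (the public API was still in flux at this commit):

    // Crate and function names here are assumptions, not part of this diff.
    use micromark::micromark;

    fn main() {
        // `[alpha]` appears before its definition, yet still resolves.
        println!("{}", micromark("[alpha]\n\n[alpha]: https://example.com"));
        // Expected (CommonMark): <p><a href="https://example.com">alpha</a></p>
    }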