about summary refs log tree commit diff stats
path: root/src/subtokenize.rs
diff options
context:
space:
mode:
author: Titus Wormer <tituswormer@gmail.com> 2022-08-11 14:53:42 +0200
committer: Titus Wormer <tituswormer@gmail.com> 2022-08-11 14:53:42 +0200
commit: d87dc75500a2e73d9521135054b42c18e6eae987 (patch)
tree: 4bc3f4a72894db3ec5ed4069841c6be5be69713d /src/subtokenize.rs
parent: 6eb2f644057f371841fe25330a57ee185f91c7af (diff)
download: markdown-rs-d87dc75500a2e73d9521135054b42c18e6eae987.tar.gz
markdown-rs-d87dc75500a2e73d9521135054b42c18e6eae987.tar.bz2
markdown-rs-d87dc75500a2e73d9521135054b42c18e6eae987.zip
Refactor to move some code to `event.rs`
Diffstat (limited to 'src/subtokenize.rs')
-rw-r--r-- src/subtokenize.rs | 23
1 file changed, 12 insertions(+), 11 deletions(-)
diff --git a/src/subtokenize.rs b/src/subtokenize.rs
index c545043..9b7c6ae 100644
--- a/src/subtokenize.rs
+++ b/src/subtokenize.rs
@@ -21,9 +21,10 @@
//! thus the whole document needs to be parsed up to the level of definitions,
//! before any level that can include references can be parsed.
+use crate::event::{Content, Event, Kind};
use crate::parser::ParseState;
-use crate::state::{Name, State};
-use crate::tokenizer::{ContentType, Event, EventType, Tokenizer};
+use crate::state::{Name as StateName, State};
+use crate::tokenizer::Tokenizer;
use crate::util::edit_map::EditMap;
/// Create a link between two [`Event`][]s.
@@ -37,10 +38,10 @@ pub fn link(events: &mut [Event], index: usize) {
/// Link two arbitrary [`Event`][]s together.
pub fn link_to(events: &mut [Event], pevious: usize, next: usize) {
- debug_assert_eq!(events[pevious].event_type, EventType::Enter);
- debug_assert_eq!(events[pevious + 1].event_type, EventType::Exit);
- debug_assert_eq!(events[pevious + 1].token_type, events[pevious].token_type);
- debug_assert_eq!(events[next].event_type, EventType::Enter);
+ debug_assert_eq!(events[pevious].kind, Kind::Enter);
+ debug_assert_eq!(events[pevious + 1].kind, Kind::Exit);
+ debug_assert_eq!(events[pevious + 1].name, events[pevious].name);
+ debug_assert_eq!(events[next].kind, Kind::Enter);
// Note: the exit of this event may not exist, so don’t check for that.
let link_previous = events[pevious]
@@ -70,7 +71,7 @@ pub fn subtokenize(events: &mut Vec<Event>, parse_state: &ParseState) -> bool {
// Find each first opening chunk.
if let Some(ref link) = event.link {
- debug_assert_eq!(event.event_type, EventType::Enter);
+ debug_assert_eq!(event.kind, Kind::Enter);
// No need to enter linked events again.
if link.previous == None {
@@ -79,17 +80,17 @@ pub fn subtokenize(events: &mut Vec<Event>, parse_state: &ParseState) -> bool {
// Subtokenizer.
let mut tokenizer = Tokenizer::new(event.point.clone(), parse_state);
// Substate.
- let mut state = State::Next(if link.content_type == ContentType::String {
- Name::StringStart
+ let mut state = State::Next(if link.content_type == Content::String {
+ StateName::StringStart
} else {
- Name::TextStart
+ StateName::TextStart
});
// Loop through links to pass them in order to the subtokenizer.
while let Some(index) = link_index {
let enter = &events[index];
let link_curr = enter.link.as_ref().expect("expected link");
- debug_assert_eq!(enter.event_type, EventType::Enter);
+ debug_assert_eq!(enter.kind, Kind::Enter);
if link_curr.previous != None {
tokenizer.define_skip(enter.point.clone());