author     Titus Wormer <tituswormer@gmail.com>    2022-08-15 11:05:22 +0200
committer  Titus Wormer <tituswormer@gmail.com>    2022-08-15 11:05:22 +0200
commit     3a4de4b3a89e7171f8e3e0b6bf02fcd2d5c5f748 (patch)
tree       6aec470579ec3021b158ff7aa1e784ecacedb1ad /src
parent     30dccab8678bd134e084afdafbf3e14d047f0965 (diff)
Refactor paragraph, data resolvers
Diffstat (limited to 'src')
-rw-r--r--  src/construct/paragraph.rs    | 65
-rw-r--r--  src/construct/partial_data.rs | 29
2 files changed, 50 insertions(+), 44 deletions(-)
diff --git a/src/construct/paragraph.rs b/src/construct/paragraph.rs
index e9fd377..b33346a 100644
--- a/src/construct/paragraph.rs
+++ b/src/construct/paragraph.rs
@@ -36,7 +36,6 @@ use crate::event::{Content, Kind, Link, Name};
 use crate::resolve::Name as ResolveName;
 use crate::state::{Name as StateName, State};
 use crate::tokenizer::Tokenizer;
-use crate::util::skip::opt as skip_opt;
 
 /// Before paragraph.
 ///
@@ -96,47 +95,57 @@ pub fn resolve(tokenizer: &mut Tokenizer) {
         if event.kind == Kind::Enter && event.name == Name::Paragraph {
             // Exit:Paragraph
             let mut exit_index = index + 3;
 
-            let mut enter_next_index =
-                skip_opt(&tokenizer.events, exit_index + 1, &[Name::LineEnding]);
-            // Enter:Paragraph
-            enter_next_index = skip_opt(
-                &tokenizer.events,
-                enter_next_index,
-                &[Name::SpaceOrTab, Name::BlockQuotePrefix],
-            );
-            // Find future `Paragraphs`.
-            while enter_next_index < tokenizer.events.len()
-                && tokenizer.events[enter_next_index].name == Name::Paragraph
-            {
-                // Remove Exit:Paragraph, Enter:LineEnding, Exit:LineEnding, Enter:Paragraph.
+            loop {
+                let mut enter_index = exit_index + 1;
+
+                if enter_index == tokenizer.events.len()
+                    || tokenizer.events[enter_index].name != Name::LineEnding
+                {
+                    break;
+                }
+
+                enter_index += 2;
+
+                while enter_index < tokenizer.events.len() {
+                    let event = &tokenizer.events[enter_index];
+
+                    if event.name != Name::SpaceOrTab
+                        && event.name != Name::BlockQuotePrefix
+                        && event.name != Name::BlockQuoteMarker
+                    {
+                        break;
+                    }
+
+                    enter_index += 1;
+                }
+
+                if enter_index == tokenizer.events.len()
+                    || tokenizer.events[enter_index].name != Name::Paragraph
+                {
+                    break;
+                }
+
+                // Remove Exit:Paragraph, Enter:LineEnding, Exit:LineEnding.
                 tokenizer.map.add(exit_index, 3, vec![]);
 
                 // Remove Enter:Paragraph.
-                tokenizer.map.add(enter_next_index, 1, vec![]);
+                tokenizer.map.add(enter_index, 1, vec![]);
 
                 // Add Exit:LineEnding position info to Exit:Data.
-                let line_ending_exit = &tokenizer.events[exit_index + 2];
-                let line_ending_point = line_ending_exit.point.clone();
-                let data_exit = &mut tokenizer.events[exit_index - 1];
-                data_exit.point = line_ending_point;
+                tokenizer.events[exit_index - 1].point =
+                    tokenizer.events[exit_index + 2].point.clone();
 
                 // Link Enter:Data on the previous line to Enter:Data on this line.
                 if let Some(link) = &mut tokenizer.events[exit_index - 2].link {
-                    link.next = Some(enter_next_index + 1);
+                    link.next = Some(enter_index + 1);
                 }
-                if let Some(link) = &mut tokenizer.events[enter_next_index + 1].link {
+                if let Some(link) = &mut tokenizer.events[enter_index + 1].link {
                     link.previous = Some(exit_index - 2);
                 }
 
                 // Potential next start.
-                exit_index = enter_next_index + 3;
-                enter_next_index = skip_opt(&tokenizer.events, exit_index + 1, &[Name::LineEnding]);
-                enter_next_index = skip_opt(
-                    &tokenizer.events,
-                    enter_next_index,
-                    &[Name::SpaceOrTab, Name::BlockQuotePrefix],
-                );
+                exit_index = enter_index + 3;
             }
 
             // Move to `Exit:Paragraph`.
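
Note: the sketch below is a simplified, self-contained illustration of the skipping logic this commit inlines in place of `skip_opt`; it is not the crate's real API. The `Name` enum and the flat slice of names stand in for markdown-rs events, which also carry an enter/exit kind and position info.

// Simplified stand-in for the crate's event names; real events also carry a
// kind (enter/exit) and a point.
#[allow(dead_code)]
#[derive(PartialEq)]
enum Name {
    Paragraph,
    LineEnding,
    SpaceOrTab,
    BlockQuotePrefix,
    BlockQuoteMarker,
}

/// Starting just after a paragraph exit, return the index of the next
/// `Paragraph` event if only a line ending and prefix events (whitespace,
/// block quote markers) sit between, mirroring how the new loop decides
/// whether to merge two single-line paragraphs.
fn next_paragraph(events: &[Name], exit_index: usize) -> Option<usize> {
    let mut enter_index = exit_index + 1;

    // The next event must be the line ending that separates the two lines.
    if events.get(enter_index) != Some(&Name::LineEnding) {
        return None;
    }

    // Step over `Enter:LineEnding` and `Exit:LineEnding`.
    enter_index += 2;

    // Skip whitespace and block quote prefix events on the new line.
    while let Some(name) = events.get(enter_index) {
        if *name != Name::SpaceOrTab
            && *name != Name::BlockQuotePrefix
            && *name != Name::BlockQuoteMarker
        {
            break;
        }

        enter_index += 1;
    }

    // Merge only when the next event is another paragraph.
    if events.get(enter_index) == Some(&Name::Paragraph) {
        Some(enter_index)
    } else {
        None
    }
}

fn main() {
    // `> a\n> b`: the paragraph continues on the next line of the block quote.
    let events = [
        Name::Paragraph,        // Exit of the previous paragraph (index 0).
        Name::LineEnding,       // Enter:LineEnding.
        Name::LineEnding,       // Exit:LineEnding.
        Name::BlockQuotePrefix, // `>` plus trailing space on the next line.
        Name::Paragraph,        // Enter of the next paragraph (index 4).
    ];
    assert_eq!(next_paragraph(&events, 0), Some(4));
}
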
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index 8c8ecbb..86e48c7 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -81,27 +81,24 @@ pub fn resolve(tokenizer: &mut Tokenizer) {
         let event = &tokenizer.events[index];
 
         if event.kind == Kind::Enter && event.name == Name::Data {
-            let exit_index = index + 1;
-            let mut exit_far_index = exit_index;
+            // Move to exit.
+            index += 1;
 
-            // Find multiple `data` events.
-            while exit_far_index + 1 < tokenizer.events.len()
-                && tokenizer.events[exit_far_index + 1].name == Name::Data
+            let mut exit_index = index;
+
+            // Find the farthest `data` event exit event.
+            while exit_index + 1 < tokenizer.events.len()
+                && tokenizer.events[exit_index + 1].name == Name::Data
             {
-                exit_far_index += 2;
+                exit_index += 2;
             }
 
-            if exit_far_index > exit_index {
-                tokenizer
-                    .map
-                    .add(exit_index, exit_far_index - exit_index, vec![]);
-
+            if exit_index > index {
+                tokenizer.map.add(index, exit_index - index, vec![]);
                 // Change positional info.
-                let exit_far = &tokenizer.events[exit_far_index];
-                tokenizer.events[exit_index].point = exit_far.point.clone();
-                index = exit_far_index;
-
-                continue;
+                tokenizer.events[index].point = tokenizer.events[exit_index].point.clone();
+                // Move to the end.
+                index = exit_index;
             }
         }
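
Note: the sketch below illustrates the idea behind the refactored data resolver under a simplifying assumption: plain `(start, end)` ranges stand in for runs of `Data` enter/exit events, so merging a run amounts to extending the first range to the end of the last, much like the resolver drops the intermediate events and moves the first exit's point. It is not the crate's real API.

/// Fold runs of adjacent spans into one, the way the resolver folds a run of
/// back-to-back `Data` events into the first event and extends its exit point.
fn merge_adjacent(spans: &[(usize, usize)]) -> Vec<(usize, usize)> {
    let mut merged: Vec<(usize, usize)> = Vec::new();

    for &(start, end) in spans {
        // Extend the previous span when this one starts exactly where it ended.
        if let Some(last) = merged.last_mut() {
            if last.1 == start {
                last.1 = end;
                continue;
            }
        }

        // Otherwise keep the span as a new entry.
        merged.push((start, end));
    }

    merged
}

fn main() {
    // Three adjacent data spans collapse into one; the detached span stays.
    let spans = [(0, 3), (3, 5), (5, 9), (12, 14)];
    assert_eq!(merge_adjacent(&spans), vec![(0, 9), (12, 14)]);
}
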