author    Titus Wormer <tituswormer@gmail.com> 2022-08-01 11:27:39 +0200
committer Titus Wormer <tituswormer@gmail.com> 2022-08-01 11:27:39 +0200
commit 1bb160f9dc45c3cdbe929e8965be69bcf8415d0c
tree   4e3dfd7795a15082ed5218d25f852be80f3fd89c
parent e97ad954e1468b90722cf91996d7dfc069fedf78
Add missing docs, refactor some code
Diffstat
 src/construct/partial_whitespace.rs | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)
diff --git a/src/construct/partial_whitespace.rs b/src/construct/partial_whitespace.rs
index 4f872ba..bf3bd4d 100644
--- a/src/construct/partial_whitespace.rs
+++ b/src/construct/partial_whitespace.rs
@@ -47,15 +47,18 @@
use crate::constant::HARD_BREAK_PREFIX_SIZE_MIN;
use crate::token::Token;
-use crate::tokenizer::{Event, EventType, Tokenizer};
+use crate::tokenizer::{Event, EventType, Resolver, Tokenizer};
use crate::util::slice::{Position, Slice};
-/// To do.
-pub fn create_resolve_whitespace(hard_break: bool, trim_whole: bool) -> impl Fn(&mut Tokenizer) {
- move |t| resolve_whitespace(t, hard_break, trim_whole)
+/// Create a resolver to handle trailing whitespace in events.
+///
+/// Performing this as a resolver instead of a tokenizer improves performance
+/// *a lot*.
+pub fn create_resolve_whitespace(hard_break: bool, trim_whole: bool) -> Box<Resolver> {
+ Box::new(move |t| resolve_whitespace(t, hard_break, trim_whole))
}
-/// To do.
+/// Resolve whitespace.
pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whole: bool) {
let mut index = 0;
@@ -76,8 +79,7 @@ pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whole: bool) {
}
}
-/// To do.
-#[allow(clippy::too_many_lines)]
+/// Trim a [`Data`][Token::Data] token.
fn trim_data(
tokenizer: &mut Tokenizer,
exit_index: usize,
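
For context, here is a minimal, self-contained sketch of the boxed-resolver pattern this commit switches to. The `Tokenizer` struct, the `Resolver` alias, and the `main` driver below are simplified stand-ins for illustration, not the real definitions in markdown-rs. The point of returning `Box<Resolver>` instead of `impl Fn(&mut Tokenizer)` is that every boxed resolver has the same concrete type, so the tokenizer can collect them and run them as deferred passes over the finished event list; an `impl Fn` return is a distinct anonymous type per function and cannot be stored heterogeneously.

// Sketch only: `Tokenizer` and `Resolver` are simplified stand-ins,
// not the actual markdown-rs types.

/// Stand-in tokenizer: it only collects events here.
struct Tokenizer {
    events: Vec<String>,
    /// Resolvers run after tokenizing, in registration order.
    resolvers: Vec<Box<Resolver>>,
}

/// A resolver is a deferred pass over the tokenizer's events.
/// Boxing gives every resolver the same concrete type, so they
/// can live together in a `Vec`.
type Resolver = dyn Fn(&mut Tokenizer);

fn create_resolve_whitespace(hard_break: bool, trim_whole: bool) -> Box<Resolver> {
    // `move` captures the configuration so the pass can run later,
    // mirroring the signature added in the diff above.
    Box::new(move |t| resolve_whitespace(t, hard_break, trim_whole))
}

fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whole: bool) {
    // The real code walks the event list and trims `Data` tokens;
    // here we just record that the pass ran, with its configuration.
    tokenizer.events.push(format!(
        "resolved: hard_break={hard_break}, trim_whole={trim_whole}"
    ));
}

fn main() {
    let mut tokenizer = Tokenizer {
        events: Vec::new(),
        resolvers: Vec::new(),
    };
    tokenizer.resolvers.push(create_resolve_whitespace(true, false));

    // Take the resolvers out so the tokenizer can be borrowed mutably
    // while each one runs over it.
    let resolvers = std::mem::take(&mut tokenizer.resolvers);
    for resolver in &resolvers {
        resolver(&mut tokenizer);
    }
    println!("{:?}", tokenizer.events);
}

This also matches the doc comment added in the diff: doing the trimming once, as a resolver over the collected events, avoids re-checking for trailing whitespace at every tokenizer step, which is the claimed performance win.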