author    | Titus Wormer <tituswormer@gmail.com> | 2022-09-19 11:17:26 +0200
committer | Titus Wormer <tituswormer@gmail.com> | 2022-09-19 11:17:26 +0200
commit    | fe618ff6e38ec0ed4da72a3935fd9ea64ee1cef5 (patch)
tree      | e1db142b47091ab2a3cb3ed09f52e4218d1355fa /src/util
parent    | d4cc03c65a9657d22c59d50f9b3d38b483362560 (diff)
download  | markdown-rs-fe618ff6e38ec0ed4da72a3935fd9ea64ee1cef5.tar.gz, markdown-rs-fe618ff6e38ec0ed4da72a3935fd9ea64ee1cef5.tar.bz2, markdown-rs-fe618ff6e38ec0ed4da72a3935fd9ea64ee1cef5.zip
Add support for parsing MDX ESM, expressions
This commit adds support for hooks that let a user integrate another
parser with `micromark-rs`, to parse ESM and expressions according to
a particular grammar (such as a programming language, typically
JavaScript).
For an example integrating with SWC, see `tests/test_utils/mod.rs`.
The integration happens through two functions passed in `options`:
`mdx_expression_parse` and `mdx_esm_parse`.
They can signal back to micromark whether they were successful,
whether there is an error at the end (in which case micromark will
try to parse more), or whether there is a syntax error (in which case
micromark will crash).
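
As a rough illustration of that contract (a hedged sketch only: `ParseSignal`, `EsmParse`, and `toy_esm_parse` are made-up names, not the crate's actual types), such a hook amounts to a function that takes the collected text and reports one of three outcomes:

```rust
// Hypothetical sketch: these names are illustrative, not the crate's API.

/// What an embedded parser reports back to micromark.
enum ParseSignal {
    /// The text parsed fine; micromark keeps going.
    Ok,
    /// The text ended unexpectedly; micromark may retry with more input.
    Eof(String),
    /// The text is invalid; micromark turns this into a crash (an error).
    Error(String),
}

/// Shape of a user-supplied ESM hook; an expression hook would look similar.
type EsmParse = dyn Fn(&str) -> ParseSignal;

/// A toy hook: accept `import`/`export` statements, treat a trailing `\`
/// as "incomplete", and reject everything else.
fn toy_esm_parse(value: &str) -> ParseSignal {
    let trimmed = value.trim();
    if trimmed.ends_with('\\') {
        ParseSignal::Eof("unexpected end of input".into())
    } else if trimmed.starts_with("import") || trimmed.starts_with("export") {
        ParseSignal::Ok
    } else {
        ParseSignal::Error("expected `import` or `export`".into())
    }
}

fn main() {
    let hook: &EsmParse = &toy_esm_parse;
    assert!(matches!(hook("import a from 'b'"), ParseSignal::Ok));

    match hook("let x = 1") {
        ParseSignal::Ok => println!("accepted"),
        ParseSignal::Eof(reason) | ParseSignal::Error(reason) => println!("rejected: {reason}"),
    }
}
```

Modeling the outcome as a three-way signal (rather than a boolean) is what lets micromark distinguish "keep collecting more text and try again" from a hard syntax error.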
Diffstat (limited to '')
-rw-r--r-- | src/util/mdx_collect.rs | 70
-rw-r--r-- | src/util/mod.rs         | 1
2 files changed, 71 insertions, 0 deletions
diff --git a/src/util/mdx_collect.rs b/src/util/mdx_collect.rs
new file mode 100644
index 0000000..73ead51
--- /dev/null
+++ b/src/util/mdx_collect.rs
@@ -0,0 +1,70 @@
+//! Collect info for MDX.
+
+use crate::event::{Kind, Name, Point};
+use crate::tokenizer::Tokenizer;
+use crate::util::slice::{Position, Slice};
+use alloc::{string::String, vec, vec::Vec};
+
+pub type Location<'a> = (usize, &'a Point);
+
+pub struct Result<'a> {
+    pub start: &'a Point,
+    pub value: String,
+    pub locations: Vec<Location<'a>>,
+}
+
+pub fn collect<'a>(tokenizer: &'a Tokenizer, from: usize, names: &[Name]) -> Result<'a> {
+    let mut result = Result {
+        start: &tokenizer.events[from].point,
+        value: String::new(),
+        locations: vec![],
+    };
+    let mut index = from;
+    let mut acc = 0;
+
+    while index < tokenizer.events.len() {
+        if tokenizer.events[index].kind == Kind::Enter
+            && names.contains(&tokenizer.events[index].name)
+        {
+            // Include virtual spaces.
+            let value = Slice::from_position(
+                tokenizer.parse_state.bytes,
+                &Position {
+                    start: &tokenizer.events[index].point,
+                    end: &tokenizer.events[index + 1].point,
+                },
+            )
+            .serialize();
+            acc += value.len();
+            result.locations.push((acc, &tokenizer.events[index].point));
+            result.value.push_str(&value);
+        }
+
+        index += 1;
+    }
+
+    result
+}
+
+// Turn an index of `result.value` into a point in the whole document.
+pub fn place_to_point(result: &Result, place: usize) -> Point {
+    let mut index = 0;
+    let mut point = result.start;
+    let mut rest = place;
+
+    while index < result.locations.len() {
+        point = result.locations[index].1;
+
+        if result.locations[index].0 > place {
+            break;
+        }
+
+        rest = place - result.locations[index].0;
+        index += 1;
+    }
+
+    let mut point = point.clone();
+    point.column += rest;
+    point.index += rest;
+    point
+}
diff --git a/src/util/mod.rs b/src/util/mod.rs
index 2ea372c..6281356 100644
--- a/src/util/mod.rs
+++ b/src/util/mod.rs
@@ -6,6 +6,7 @@ pub mod decode_character_reference;
 pub mod edit_map;
 pub mod encode;
 pub mod gfm_tagfilter;
+pub mod mdx_collect;
 pub mod normalize_identifier;
 pub mod sanitize_uri;
 pub mod skip;
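
The interesting part of `mdx_collect.rs` is the bookkeeping: expression/ESM fragments are concatenated into one string (`result.value`) while `locations` remembers, for each fragment, its cumulative end offset in that string and its start `Point` in the document, so a position reported by the embedded parser against the concatenated text can be mapped back to a document point with `place_to_point`. Below is a stripped-down, self-contained sketch of that idea using plain byte offsets; the names and types are illustrative, not the crate's API.

```rust
// Standalone sketch of the collect / place_to_point bookkeeping, with plain
// byte offsets instead of the crate's Tokenizer, Point, and Slice types
// (names here are illustrative, not the crate's API).

/// Concatenated fragment text plus, per fragment, the cumulative end offset
/// in the concatenation and the fragment's start offset in the document.
struct Collected {
    value: String,
    locations: Vec<(usize, usize)>, // (end in `value`, start in document)
}

/// Concatenate fragments, given as (document offset, text) pairs.
fn collect(fragments: &[(usize, &str)]) -> Collected {
    let mut value = String::new();
    let mut locations = Vec::new();
    let mut acc = 0;
    for &(start, text) in fragments {
        acc += text.len();
        locations.push((acc, start));
        value.push_str(text);
    }
    Collected { value, locations }
}

/// Map an offset into `collected.value` back to a document offset: find the
/// fragment containing `place`, then add the remainder to that fragment's
/// document start (mirroring `place_to_point` above).
fn place_to_index(collected: &Collected, place: usize) -> usize {
    let mut rest = place;
    let mut base = 0;
    for &(end, start) in &collected.locations {
        base = start;
        if end > place {
            break;
        }
        rest = place - end;
    }
    base + rest
}

fn main() {
    // Document: "a {b} c {d e}". The expression text "b" starts at document
    // offset 3 and "d e" starts at offset 9; the collected value is "bd e".
    let collected = collect(&[(3, "b"), (9, "d e")]);
    assert_eq!(collected.value, "bd e");
    assert_eq!(place_to_index(&collected, 0), 3); // 'b' -> offset 3
    assert_eq!(place_to_index(&collected, 1), 9); // 'd' -> offset 9
    assert_eq!(place_to_index(&collected, 3), 11); // 'e' -> offset 11
}
```

The real functions work on line/column-aware `Point`s and account for virtual spaces; the sketch keeps only the offset arithmetic.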