author     Titus Wormer <tituswormer@gmail.com>    2022-06-24 17:57:10 +0200
committer  Titus Wormer <tituswormer@gmail.com>    2022-06-24 17:57:10 +0200
commit     a3dd207e3b1ebcbcb6cec0f703a695e51ae4ece0 (patch)
tree       7b4bf040da23a03f38efe92a252e187a630a14f6 /src/content/flow.rs
parent     e7b3761c8cd6f0f902dd9927e4fbf2589465ed57 (diff)
Add links, images (resource)
This is still somewhat messy code that needs cleaning up, but it adds support for
links and images of the resource kind (`[a](b)`).
References (`[a][b]`) are parsed and will be supported soon, but they still need to be matched against definitions.
* Fix a bug where percent-encoded bytes were not padded when normalizing URLs
  (see the illustration after this list)
* Fix a bug where escapes counted as balancing parentheses in destinations
* Add `space_or_tab_one_line_ending` to parse whitespace, including up to
  one line ending (but not a blank line)
* Add `ParseState` to share codes, definitions, etc. (sketched after this list)
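
A minimal illustration of the padding fix from the first bullet, not the crate's actual URL-normalization code: when a byte is percent-encoded during normalization it has to be written as two hex digits, so a small value such as `0x0A` must come out as `%0A`, not `%A`.

```rust
/// Illustration only (not code from this commit): percent-encode one byte,
/// padding to two uppercase hex digits.
fn percent_encode_byte(byte: u8) -> String {
    // `{:02X}` pads to width 2 with a leading zero.
    format!("%{:02X}", byte)
}

fn main() {
    assert_eq!(percent_encode_byte(0x0A), "%0A");
    assert_eq!(percent_encode_byte(0xC3), "%C3");
}
```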
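
A rough sketch of what `ParseState` might hold, based only on this commit message and the diff below; the exact field set is an assumption (`codes` is visible in the diff, `definitions` is implied by "share codes, definitions, etc.").

```rust
// Sketch, not the actual definition in src/parser.rs: shared parsing state
// that is passed by reference instead of handing `codes` around on its own.
use crate::tokenizer::Code;

pub struct ParseState {
    /// Character codes for the whole document (used by `flow` below).
    pub codes: Vec<Code>,
    /// Identifiers of definitions found so far, needed to match references
    /// such as `[a][b]` (assumed field).
    pub definitions: Vec<String>,
}
```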
Diffstat (limited to 'src/content/flow.rs')
-rw-r--r--  src/content/flow.rs | 20
1 file changed, 14 insertions, 6 deletions
```diff
diff --git a/src/content/flow.rs b/src/content/flow.rs
index e71d25a..546712f 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -26,6 +26,7 @@ use crate::construct::{
     html_flow::start as html_flow, paragraph::start as paragraph,
     thematic_break::start as thematic_break,
 };
+use crate::parser::ParseState;
 use crate::subtokenize::subtokenize;
 use crate::tokenizer::{Code, Event, EventType, Point, State, StateFnResult, TokenType, Tokenizer};
 use crate::util::{
@@ -34,9 +35,10 @@ use crate::util::{
 };
 
 /// Turn `codes` as the flow content type into events.
-pub fn flow(codes: &[Code], point: Point, index: usize) -> Vec<Event> {
-    let mut tokenizer = Tokenizer::new(point, index);
-    tokenizer.feed(codes, Box::new(start), true);
+pub fn flow(parse_state: &ParseState, point: Point, index: usize) -> Vec<Event> {
+    let mut tokenizer = Tokenizer::new(point, index, parse_state);
+
+    tokenizer.push(&parse_state.codes, Box::new(start), true);
 
     let mut index = 0;
 
@@ -47,9 +49,14 @@ pub fn flow(codes: &[Code], point: Point, index: usize) -> Vec<Event> {
             && event.token_type == TokenType::DefinitionLabelString
         {
             let id = normalize_identifier(
-                serialize(codes, &from_exit_event(&tokenizer.events, index), false).as_str(),
+                serialize(
+                    &parse_state.codes,
+                    &from_exit_event(&tokenizer.events, index),
+                    false,
+                )
+                .as_str(),
             );
-            println!("to do: use identifier {:?}", id);
+            println!("to do: use definition identifier {:?}", id);
         }
 
         index += 1;
@@ -58,8 +65,9 @@ pub fn flow(codes: &[Code], point: Point, index: usize) -> Vec<Event> {
     }
 
     let mut result = (tokenizer.events, false);
     while !result.1 {
-        result = subtokenize(result.0, codes);
+        result = subtokenize(result.0, parse_state);
     }
+
     result.0
 }
```
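
For context, a hypothetical caller sketch (not code from this commit): with the new signature, whatever drives flow parsing passes the shared `ParseState`, and the codes travel inside it. Only the `flow(parse_state, point, index)` signature, the `parse_state.codes` field, and the imported types are taken from the diff above; the surrounding function is made up for illustration.

```rust
// Hypothetical driver, for illustration only.
use crate::content::flow::flow;
use crate::parser::ParseState;
use crate::tokenizer::{Event, Point};

/// Turn a whole document into flow events, starting at `point` and code index 0.
fn document_events(parse_state: &ParseState, point: Point) -> Vec<Event> {
    flow(parse_state, point, 0)
}
```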