Diffstat (limited to 'src')
-rw-r--r-- | src/content/document.rs | 25 |
-rw-r--r-- | src/parser.rs           | 21 |
2 files changed, 20 insertions, 26 deletions
diff --git a/src/content/document.rs b/src/content/document.rs
index f90aea7..9def6c5 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -8,10 +8,9 @@
 //! * [Block quote][crate::construct::block_quote]
 //! * [List][crate::construct::list_item]
 
-use crate::event::{Content, Event, Kind, Link, Name, Point};
-use crate::parser::ParseState;
+use crate::event::{Content, Event, Kind, Link, Name};
 use crate::state::{Name as StateName, State};
-use crate::subtokenize::{divide_events, subtokenize};
+use crate::subtokenize::divide_events;
 use crate::tokenizer::{Container, ContainerState, Tokenizer};
 use crate::util::skip;
 
@@ -46,26 +45,6 @@ enum Phase {
     Eof,
 }
 
-/// Parse a document.
-pub fn document(parse_state: &mut ParseState, point: Point) -> Vec<Event> {
-    let mut tokenizer = Tokenizer::new(point, parse_state);
-
-    let state = tokenizer.push(
-        (0, 0),
-        (parse_state.bytes.len(), 0),
-        State::Next(StateName::DocumentStart),
-    );
-    tokenizer.flush(state, true);
-
-    let mut events = tokenizer.events;
-
-    parse_state.definitions = tokenizer.tokenize_state.definitions;
-
-    while !subtokenize(&mut events, parse_state) {}
-
-    events
-}
-
 /// Start of document, at an optional BOM.
 ///
 /// ```markdown
diff --git a/src/parser.rs b/src/parser.rs
index a8416ed..cc93021 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -1,7 +1,9 @@
 //! Turn a string of markdown into events.
 
-use crate::content::document::document;
 use crate::event::{Event, Point};
+use crate::state::{Name as StateName, State};
+use crate::subtokenize::subtokenize;
+use crate::tokenizer::Tokenizer;
 use crate::{Constructs, Options};
 
 /// Info needed, in all content types, when parsing markdown.
@@ -27,15 +29,28 @@ pub fn parse<'a>(value: &'a str, options: &'a Options) -> (Vec<Event>, &'a [u8])
         definitions: vec![],
     };
 
-    let events = document(
-        &mut parse_state,
+    let mut tokenizer = Tokenizer::new(
         Point {
             line: 1,
             column: 1,
             index: 0,
             vs: 0,
         },
+        &parse_state,
     );
 
+    let state = tokenizer.push(
+        (0, 0),
+        (parse_state.bytes.len(), 0),
+        State::Next(StateName::DocumentStart),
+    );
+    tokenizer.flush(state, true);
+
+    let mut events = tokenizer.events;
+
+    parse_state.definitions = tokenizer.tokenize_state.definitions;
+
+    while !subtokenize(&mut events, &parse_state) {}
+
     (events, parse_state.bytes)
 }
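For reference, with both hunks applied the tail of parse in src/parser.rs reads as sketched below. This is assembled from the added and context lines above (ParseState, Tokenizer, State, StateName, and subtokenize are items elsewhere in this crate); the comments are descriptive only and not part of the patch.

    // Drive the document state machine over the whole byte range,
    // starting at line 1, column 1 (work previously done by `document`
    // in src/content/document.rs).
    let mut tokenizer = Tokenizer::new(
        Point {
            line: 1,
            column: 1,
            index: 0,
            vs: 0,
        },
        &parse_state,
    );

    let state = tokenizer.push(
        (0, 0),
        (parse_state.bytes.len(), 0),
        State::Next(StateName::DocumentStart),
    );
    tokenizer.flush(state, true);

    let mut events = tokenizer.events;

    // Carry the definitions found while tokenizing back onto the parse state.
    parse_state.definitions = tokenizer.tokenize_state.definitions;

    // Keep expanding nested content types until everything is subtokenized.
    while !subtokenize(&mut events, &parse_state) {}

    (events, parse_state.bytes)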