| author | 2022-07-07 17:21:38 +0200 |
|---|---|
| committer | 2022-07-07 17:36:35 +0200 |
| commit | 4806864e5377a5fef937b3fa02542e620c547969 (patch) |
| tree | c91ae2bbd1dc2037f425efd24d62d05e706e3e60 /src/content/flow.rs |
| parent | c2b4402223e53498078fc33dd55aabc0a48cdb56 (diff) |
Add basic support for block quotes
Diffstat (limited to '')

| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/content/flow.rs | 49 |
1 file changed, 2 insertions(+), 47 deletions(-)
```diff
diff --git a/src/content/flow.rs b/src/content/flow.rs
index 74c6a62..f406685 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -26,52 +26,7 @@ use crate::construct::{
     html_flow::start as html_flow, paragraph::start as paragraph,
     thematic_break::start as thematic_break,
 };
-use crate::parser::ParseState;
-use crate::subtokenize::subtokenize;
-use crate::tokenizer::{Code, Event, EventType, Point, State, StateFnResult, TokenType, Tokenizer};
-use crate::util::{
-    normalize_identifier::normalize_identifier,
-    span::{from_exit_event, serialize},
-};
-use std::collections::HashSet;
-
-/// Turn `codes` as the flow content type into events.
-pub fn flow(parse_state: &mut ParseState, point: Point, index: usize) -> Vec<Event> {
-    let mut tokenizer = Tokenizer::new(point, index, parse_state);
-    tokenizer.push(&parse_state.codes, Box::new(start), true);
-    let mut next_definitions: HashSet<String> = HashSet::new();
-
-    let mut index = 0;
-
-    while index < tokenizer.events.len() {
-        let event = &tokenizer.events[index];
-
-        if event.event_type == EventType::Exit
-            && event.token_type == TokenType::DefinitionLabelString
-        {
-            next_definitions.insert(normalize_identifier(
-                serialize(
-                    &parse_state.codes,
-                    &from_exit_event(&tokenizer.events, index),
-                    false,
-                )
-                .as_str(),
-            ));
-        }
-
-        index += 1;
-    }
-
-    let mut result = (tokenizer.events, false);
-
-    parse_state.definitions = next_definitions;
-
-    while !result.1 {
-        result = subtokenize(result.0, parse_state);
-    }
-
-    result.0
-}
+use crate::tokenizer::{Code, State, StateFnResult, TokenType, Tokenizer};
 
 /// Before flow.
 ///
@@ -83,7 +38,7 @@ pub fn flow(parse_state: &mut ParseState, point: Point, index: usize) -> Vec<Eve
 /// |    bravo
 /// |***
 /// ```
-fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
     match code {
         Code::None => (State::Ok, None),
         _ => tokenizer.attempt(blank_line, |ok| {
```
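For orientation, the removed `flow` function above was the driver that turned codes into flow events: it built a `Tokenizer`, pushed `start` as the initial state function, then collected definition identifiers and ran `subtokenize`. With `start` now `pub`, that state machine can be kicked off from outside this module. Below is a minimal sketch of such a caller, assuming only the `Tokenizer`/`ParseState` API visible in the removed code; the `drive_flow` name is hypothetical and not part of the crate.

```rust
// Minimal sketch, assuming the `Tokenizer`/`ParseState` API shown in the
// removed `flow` driver; `drive_flow` is a hypothetical caller, not crate API.
use crate::content::flow::start as flow_start;
use crate::parser::ParseState;
use crate::tokenizer::{Event, Point, Tokenizer};

/// Run the flow state machine over `parse_state.codes`, starting at
/// `flow_start`, and return the raw events. Unlike the removed `flow`
/// driver, this does not collect definitions or subtokenize.
pub fn drive_flow(parse_state: &mut ParseState, point: Point, index: usize) -> Vec<Event> {
    let mut tokenizer = Tokenizer::new(point, index, parse_state);
    // Feed all codes through the state machine; `true` marks the end of
    // input, mirroring the removed driver.
    tokenizer.push(&parse_state.codes, Box::new(flow_start), true);
    tokenizer.events
}
```

Whatever module takes over driving flow presumably still needs to re-hook the definition collection and `subtokenize` loop removed here; those steps are not visible in this diff because it is limited to `src/content/flow.rs`.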
