src/parser.rs

//! Turn bytes of markdown into events.

use crate::event::{Event, Point};
use crate::state::{Name as StateName, State};
use crate::subtokenize::subtokenize;
use crate::tokenizer::Tokenizer;
use crate::Options;
use alloc::{string::String, vec, vec::Vec};

/// Info needed, in all content types, when parsing markdown.
///
/// Importantly, this contains a set of known definitions.
/// It also references the input value as bytes (`u8`).
#[derive(Debug)]
pub struct ParseState<'a> {
    /// Configuration.
    pub options: &'a Options,
    /// List of bytes.
    pub bytes: &'a [u8],
    /// Set of defined definition identifiers.
    pub definitions: Vec<String>,
    /// Set of defined GFM footnote definition identifiers.
    pub gfm_footnote_definitions: Vec<String>,
}

/// Turn a string of markdown into events.
///
/// Passes the bytes back so the compiler can access the source.
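///
/// ## Example
///
/// A minimal sketch of calling this from elsewhere in the crate (it assumes
/// `Options` implements `Default`; marked `ignore` because it is a sketch
/// rather than a doc test):
///
/// ```rust,ignore
/// use crate::{parser::parse, Options};
///
/// let options = Options::default();
/// let (events, bytes) = parse("a *b* c.", &options).unwrap();
///
/// // The input comes back as bytes so the compiler can slice the source.
/// assert_eq!(bytes, "a *b* c.".as_bytes());
/// assert!(!events.is_empty());
/// ```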
pub fn parse<'a>(value: &'a str, options: &'a Options) -> Result<(Vec<Event>, &'a [u8]), String> {
    let mut parse_state = ParseState {
        options,
        bytes: value.as_bytes(),
        definitions: vec![],
        gfm_footnote_definitions: vec![],
    };

    let mut tokenizer = Tokenizer::new(
        Point {
            line: 1,
            column: 1,
            index: 0,
            vs: 0,
        },
        &parse_state,
    );

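    // Feed the whole input (index `0` up to `bytes.len()`) to the tokenizer,
    // starting with the document content type.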
    let state = tokenizer.push(
        (0, 0),
        (parse_state.bytes.len(), 0),
        State::Next(StateName::DocumentStart),
    );
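    // All input has been fed, so flush the tokenizer: this finishes the
    // document and yields the definitions found at the document level.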
    let mut result = tokenizer.flush(state, true)?;
    let mut events = tokenizer.events;

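    // Remember the definitions (regular and GFM footnote) found so far in the
    // parse state, where later passes can see them.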
    parse_state
        .gfm_footnote_definitions
        .append(&mut result.gfm_footnote_definitions);
    parse_state.definitions.append(&mut result.definitions);

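    // Parse embedded content (such as the text inside a heading) until
    // nothing is left, collecting the definitions found along the way.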
    loop {
        let mut result = subtokenize(&mut events, &parse_state, &None)?;
        parse_state
            .gfm_footnote_definitions
            .append(&mut result.gfm_footnote_definitions);
        parse_state.definitions.append(&mut result.definitions);

        if result.done {
            break;
        }
    }

    Ok((events, parse_state.bytes))
}