//! The string content type.
//!
//! **String** is a limited **text**-like content type that only allows
//! character escapes and character references.
//! It occurs in places such as identifiers (media references, definitions),
//! titles, URLs, and the info and meta parts of fenced code.
//!
//! The constructs found in string are:
//!
//! * [Character escape][crate::construct::character_escape]
//! * [Character reference][crate::construct::character_reference]
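//!
//! For example, the title part of a definition is parsed as string content,
//! so character escapes and character references work there, while other
//! constructs do not:
//!
//! ```markdown
//! [a]: <b> "a \" character escape and an &amp; character reference"
//! ```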

use crate::construct::{
    character_escape::start as character_escape, character_reference::start as character_reference,
};
use crate::tokenizer::{Code, Event, State, StateFnResult, TokenType, Tokenizer};

/// Turn `codes` as the string content type into events.
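///
/// A sketch of how this is meant to be called (not a doctest; it assumes
/// plain characters are represented as `Code::Char`, as used in `in_data`
/// below):
///
/// ```rust,ignore
/// let codes = vec![Code::Char('a'), Code::Char('\\'), Code::Char('&')];
/// let events = string(&codes);
/// ```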
// To do: remove this `allow` when all the content types are glued together.
#[allow(dead_code)]
pub fn string(codes: &[Code]) -> Vec<Event> {
    let mut tokenizer = Tokenizer::new();
    let (state, remainder) = tokenizer.feed(codes, Box::new(before), true);

    if let Some(ref x) = remainder {
        if !x.is_empty() {
            unreachable!("expected no final remainder {:?}", x);
        }
    }

    match state {
        State::Ok => {}
        _ => unreachable!("expected final state to be `State::Ok`"),
    }

    tokenizer.events
}

/// Before string.
///
/// First we assume character reference.
///
/// ```markdown
/// |&
/// |\&
/// |qwe
/// ```
fn before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::None => (State::Ok, None),
        _ => tokenizer.attempt(character_reference, |ok| {
            Box::new(if ok {
                before
            } else {
                before_not_character_reference
            })
        })(tokenizer, code),
    }
}

/// Before string, not at a character reference.
///
/// Assume character escape.
///
/// ```markdown
/// |\&
/// |qwe
/// ```
fn before_not_character_reference(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::None => (State::Ok, None),
        _ => tokenizer.attempt(character_escape, |ok| {
            Box::new(if ok {
                before
            } else {
                before_not_character_escape
            })
        })(tokenizer, code),
    }
}

/// Before string, not at a character reference or character escape.
///
/// We’re at data.
///
/// ```markdown
/// |qwe
/// ```
fn before_not_character_escape(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    if let Code::None = code {
        (State::Ok, None)
    } else {
        tokenizer.enter(TokenType::Data);
        tokenizer.consume(code);
        (State::Fn(Box::new(in_data)), None)
    }
}

/// In data.
///
/// ```markdown
/// q|w|e
/// ```
fn in_data(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::None => {
            tokenizer.exit(TokenType::Data);
            (State::Ok, None)
        }
        // To do: somehow get these markers from constructs.
        Code::Char('&' | '\\') => {
            tokenizer.exit(TokenType::Data);
            before(tokenizer, code)
        }
        _ => {
            tokenizer.consume(code);
            (State::Fn(Box::new(in_data)), None)
        }
    }
}