//! The string content type.
//!
//! **String** is a limited [text][]-like content type that only allows
//! character escapes and character references.
//! It exists in things such as identifiers (media references, definitions),
//! titles, URLs, code (fenced) info and meta parts.
//!
//! The constructs found in string are:
//!
//! * [Character escape][crate::construct::character_escape]
//! * [Character reference][crate::construct::character_reference]
//!
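//! For example, in a definition such as the one below (an illustrative
//! sketch, not an example from the crate itself), the title is parsed as
//! string: the character escape and the character reference are both
//! decoded to `&`, and nothing else is interpreted:
//!
//! ```markdown
//! [x]: <y> "a \& b &amp; c"
//! ```
//!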
//! [text]: crate::construct::text

use crate::construct::partial_whitespace::resolve_whitespace;
use crate::resolve::Name as ResolveName;
use crate::state::{Name as StateName, State};
use crate::subtokenize::Subresult;
use crate::tokenizer::Tokenizer;
use alloc::string::String;

/// Characters that can start something in string.
const MARKERS: [u8; 2] = [b'&', b'\\'];

/// Start of string.
///
/// ````markdown
/// > | ```js
///        ^
/// ````
pub fn start(tokenizer: &mut Tokenizer) -> State {
    // Note which bytes can start a construct in string; the data state
    // stops before them.
    tokenizer.tokenize_state.markers = &MARKERS;
    State::Retry(StateName::StringBefore)
}

/// Before string.
///
/// ````markdown
/// > | ```js
///        ^
/// ````
pub fn before(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        None => {
            // Done: register the resolvers that merge adjacent data events
            // and handle whitespace.
            tokenizer.register_resolver(ResolveName::Data);
            tokenizer.register_resolver(ResolveName::String);
            State::Ok
        }
        Some(b'&') => {
            // Try a character reference; if it turns out not to be one,
            // treat the `&` as data.
            tokenizer.attempt(
                State::Next(StateName::StringBefore),
                State::Next(StateName::StringBeforeData),
            );
            State::Retry(StateName::CharacterReferenceStart)
        }
        Some(b'\\') => {
            // Try a character escape; if it turns out not to be one, treat
            // the `\` as data.
            tokenizer.attempt(
                State::Next(StateName::StringBefore),
                State::Next(StateName::StringBeforeData),
            );
            State::Retry(StateName::CharacterEscapeStart)
        }
        // Anything else is data.
        _ => State::Retry(StateName::StringBeforeData),
    }
}

/// At data.
///
/// ````markdown
/// > | ```js
///        ^
/// ````
pub fn before_data(tokenizer: &mut Tokenizer) -> State {
    // Consume data until the next potential construct (a byte in `MARKERS`)
    // or the end, then check again.
    tokenizer.attempt(State::Next(StateName::StringBefore), State::Nok);
    State::Retry(StateName::DataStart)
}

/// Resolve whitespace in string.
pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
    resolve_whitespace(tokenizer, false, false);
    Ok(None)
}