//! The `content`, ahum, content type.
//!
//! **Content** is zero or more definitions, and then zero or one paragraph.
//! It’s a weird one, needed to make certain edge cases around definitions
//! spec compliant.
//! Definitions are unlike other things in markdown: like **text**, they can
//! contain arbitrary line endings, but they *have* to end at a line ending.
//! If they end in something else, the whole definition is instead seen as a
//! paragraph.
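//!
//! For example (an illustrative case), the following chunk of content is two
//! definitions followed by a one-line paragraph:
//!
//! ```markdown
//! [a]: b
//! [c]: d
//! A paragraph.
//! ```
//!
//! Whereas here, because non-whitespace follows the title on the same line,
//! the definition does not end at a line ending, so the whole line is a
//! paragraph instead:
//!
//! ```markdown
//! [a]: b "c" d
//! ```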
//!
//! The constructs found in content are:
//!
//! * Definition
//! * Paragraph

use crate::tokenizer::{Code, State, StateFnResult, TokenType, Tokenizer};

/// Before content.
///
/// ```markdown
/// |[x]: y
/// |asd
/// ```
pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
            unreachable!("expected non-eol/eof");
        }
        _ => paragraph_initial(tokenizer, code),
        // To do: definition.
        // _ => tokenizer.attempt(definition, |ok| {
        //     Box::new(if ok {
        //         a
        //     } else {
        //         b
        //     })
        // })(tokenizer, code),
    }
}

/// Before a paragraph.
///
/// ```markdown
/// |asd
/// ```
fn paragraph_initial(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::None | Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
            unreachable!("expected non-eol/eof");
        }
        _ => {
            // Open the paragraph and its first text chunk, then parse the rest
            // of the line as data.
            tokenizer.enter(TokenType::Paragraph);
            tokenizer.enter(TokenType::ChunkText);
            data(tokenizer, code)
        }
    }
}

/// In a line in a paragraph.
///
/// ```markdown
/// |\&
/// |qwe
/// ```
fn data(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::None => {
            // At EOF: close the current chunk and the paragraph.
            tokenizer.exit(TokenType::ChunkText);
            tokenizer.exit(TokenType::Paragraph);
            (State::Ok, None)
        }
        Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
            // At an EOL: consume it into the current chunk, then close that
            // chunk and open a new one for the next line.
            tokenizer.consume(code);
            tokenizer.exit(TokenType::ChunkText);
            tokenizer.enter(TokenType::ChunkText);
            (State::Fn(Box::new(data)), None)
        }
        _ => {
            // Anything else is data: consume it and stay in this state.
            tokenizer.consume(code);
            (State::Fn(Box::new(data)), None)
        }
    }
}