path: root/src/construct/partial_destination.rs
author    Titus Wormer <tituswormer@gmail.com>  2022-06-21 18:06:47 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-06-21 18:06:47 +0200
commit    d1063268e62633a334bb9464f7cba2385da5812d (patch)
tree      76035cdf260046cd2afe1841c0351b311ea66d99 /src/construct/partial_destination.rs
parent    56ff5c73c7ec19b349e7d60d04ce1057c006d6ec (diff)
Add support for passing token types to destination, label, title
Diffstat
-rw-r--r--  src/construct/partial_destination.rs  135
1 file changed, 82 insertions, 53 deletions
diff --git a/src/construct/partial_destination.rs b/src/construct/partial_destination.rs
index 82e83fe..823ce99 100644
--- a/src/construct/partial_destination.rs
+++ b/src/construct/partial_destination.rs
@@ -40,36 +40,68 @@
//!
//! <!-- To do: link label end. -->
-// To do: pass token types in.
-
use crate::tokenizer::{Code, State, StateFnResult, TokenType, Tokenizer};
+/// Configuration.
+///
+/// You must pass in the token types that are used.
+#[derive(Debug)]
+pub struct Options {
+ /// Token for the whole destination.
+ pub destination: TokenType,
+ /// Token for a literal (enclosed) destination.
+ pub literal: TokenType,
+ /// Token for a literal marker.
+ pub marker: TokenType,
+ /// Token for a raw destination.
+ pub raw: TokenType,
+ /// Token for the string.
+ pub string: TokenType,
+}
+
+/// State needed to parse destination.
+#[derive(Debug)]
+struct Info {
+ /// Paren balance (used in raw).
+ balance: usize,
+ /// Configuration.
+ options: Options,
+}
+
/// Before a destination.
///
/// ```markdown
/// |<ab>
/// |ab
/// ```
-pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+pub fn start(tokenizer: &mut Tokenizer, code: Code, options: Options) -> StateFnResult {
+ let info = Info {
+ balance: 0,
+ options,
+ };
+
match code {
Code::Char('<') => {
- tokenizer.enter(TokenType::DefinitionDestination);
- tokenizer.enter(TokenType::DefinitionDestinationLiteral);
- tokenizer.enter(TokenType::DefinitionDestinationLiteralMarker);
+ tokenizer.enter(info.options.destination.clone());
+ tokenizer.enter(info.options.literal.clone());
+ tokenizer.enter(info.options.marker.clone());
tokenizer.consume(code);
- tokenizer.exit(TokenType::DefinitionDestinationLiteralMarker);
- (State::Fn(Box::new(enclosed_before)), None)
+ tokenizer.exit(info.options.marker.clone());
+ (
+ State::Fn(Box::new(|t, c| enclosed_before(t, c, info))),
+ None,
+ )
}
Code::None | Code::CarriageReturnLineFeed | Code::VirtualSpace | Code::Char(' ' | ')') => {
(State::Nok, None)
}
Code::Char(char) if char.is_ascii_control() => (State::Nok, None),
Code::Char(_) => {
- tokenizer.enter(TokenType::DefinitionDestination);
- tokenizer.enter(TokenType::DefinitionDestinationRaw);
- tokenizer.enter(TokenType::DefinitionDestinationString);
+ tokenizer.enter(info.options.destination.clone());
+ tokenizer.enter(info.options.raw.clone());
+ tokenizer.enter(info.options.string.clone());
tokenizer.enter(TokenType::ChunkString);
- raw(tokenizer, code, 0)
+ raw(tokenizer, code, info)
}
}
}
@@ -79,18 +111,18 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// <|ab>
/// ```
-fn enclosed_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn enclosed_before(tokenizer: &mut Tokenizer, code: Code, info: Info) -> StateFnResult {
if let Code::Char('>') = code {
- tokenizer.enter(TokenType::DefinitionDestinationLiteralMarker);
+ tokenizer.enter(info.options.marker.clone());
tokenizer.consume(code);
- tokenizer.exit(TokenType::DefinitionDestinationLiteralMarker);
- tokenizer.exit(TokenType::DefinitionDestinationLiteral);
- tokenizer.exit(TokenType::DefinitionDestination);
+ tokenizer.exit(info.options.marker.clone());
+ tokenizer.exit(info.options.literal.clone());
+ tokenizer.exit(info.options.destination);
(State::Ok, None)
} else {
- tokenizer.enter(TokenType::DefinitionDestinationString);
+ tokenizer.enter(info.options.string.clone());
tokenizer.enter(TokenType::ChunkString);
- enclosed(tokenizer, code)
+ enclosed(tokenizer, code, info)
}
}
@@ -99,23 +131,26 @@ fn enclosed_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// <u|rl>
/// ```
-fn enclosed(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn enclosed(tokenizer: &mut Tokenizer, code: Code, info: Info) -> StateFnResult {
match code {
Code::Char('>') => {
tokenizer.exit(TokenType::ChunkString);
- tokenizer.exit(TokenType::DefinitionDestinationString);
- enclosed_before(tokenizer, code)
+ tokenizer.exit(info.options.string.clone());
+ enclosed_before(tokenizer, code, info)
}
Code::None | Code::CarriageReturnLineFeed | Code::Char('\r' | '\n' | '<') => {
(State::Nok, None)
}
Code::Char('\\') => {
tokenizer.consume(code);
- (State::Fn(Box::new(enclosed_escape)), None)
+ (
+ State::Fn(Box::new(|t, c| enclosed_escape(t, c, info))),
+ None,
+ )
}
_ => {
tokenizer.consume(code);
- (State::Fn(Box::new(enclosed)), None)
+ (State::Fn(Box::new(|t, c| enclosed(t, c, info))), None)
}
}
}
@@ -125,13 +160,13 @@ fn enclosed(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// <a\|>b>
/// ```
-fn enclosed_escape(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn enclosed_escape(tokenizer: &mut Tokenizer, code: Code, info: Info) -> StateFnResult {
match code {
Code::Char('<' | '>' | '\\') => {
tokenizer.consume(code);
- (State::Fn(Box::new(enclosed)), None)
+ (State::Fn(Box::new(|t, c| enclosed(t, c, info))), None)
}
- _ => enclosed(tokenizer, code),
+ _ => enclosed(tokenizer, code, info),
}
}
@@ -140,48 +175,44 @@ fn enclosed_escape(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// a|b
/// ```
-fn raw(tokenizer: &mut Tokenizer, code: Code, balance: usize) -> StateFnResult {
+fn raw(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> StateFnResult {
// To do: configurable.
let limit = usize::MAX;
match code {
Code::Char('(') => {
- if balance >= limit {
+ if info.balance >= limit {
(State::Nok, None)
} else {
tokenizer.consume(code);
- (
- State::Fn(Box::new(move |t, c| raw(t, c, balance + 1))),
- None,
- )
+ info.balance += 1;
+ (State::Fn(Box::new(move |t, c| raw(t, c, info))), None)
}
}
Code::Char(')') => {
- if balance == 0 {
+ if info.balance == 0 {
tokenizer.exit(TokenType::ChunkString);
- tokenizer.exit(TokenType::DefinitionDestinationString);
- tokenizer.exit(TokenType::DefinitionDestinationRaw);
- tokenizer.exit(TokenType::DefinitionDestination);
+ tokenizer.exit(info.options.string.clone());
+ tokenizer.exit(info.options.raw.clone());
+ tokenizer.exit(info.options.destination);
(State::Ok, Some(vec![code]))
} else {
tokenizer.consume(code);
- (
- State::Fn(Box::new(move |t, c| raw(t, c, balance - 1))),
- None,
- )
+ info.balance -= 1;
+ (State::Fn(Box::new(move |t, c| raw(t, c, info))), None)
}
}
Code::None
| Code::CarriageReturnLineFeed
| Code::VirtualSpace
| Code::Char('\t' | '\r' | '\n' | ' ') => {
- if balance > 0 {
+ if info.balance > 0 {
(State::Nok, None)
} else {
tokenizer.exit(TokenType::ChunkString);
- tokenizer.exit(TokenType::DefinitionDestinationString);
- tokenizer.exit(TokenType::DefinitionDestinationRaw);
- tokenizer.exit(TokenType::DefinitionDestination);
+ tokenizer.exit(info.options.string.clone());
+ tokenizer.exit(info.options.raw.clone());
+ tokenizer.exit(info.options.destination);
(State::Ok, Some(vec![code]))
}
}
@@ -189,13 +220,13 @@ fn raw(tokenizer: &mut Tokenizer, code: Code, balance: usize) -> StateFnResult {
Code::Char('\\') => {
tokenizer.consume(code);
(
- State::Fn(Box::new(move |t, c| raw_escape(t, c, balance))),
+ State::Fn(Box::new(move |t, c| raw_escape(t, c, info))),
None,
)
}
Code::Char(_) => {
tokenizer.consume(code);
- (State::Fn(Box::new(move |t, c| raw(t, c, balance))), None)
+ (State::Fn(Box::new(move |t, c| raw(t, c, info))), None)
}
}
}
@@ -205,15 +236,13 @@ fn raw(tokenizer: &mut Tokenizer, code: Code, balance: usize) -> StateFnResult {
/// ```markdown
/// a\|)b
/// ```
-fn raw_escape(tokenizer: &mut Tokenizer, code: Code, balance: usize) -> StateFnResult {
+fn raw_escape(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> StateFnResult {
match code {
Code::Char('(' | ')' | '\\') => {
tokenizer.consume(code);
- (
- State::Fn(Box::new(move |t, c| raw(t, c, balance + 1))),
- None,
- )
+ info.balance += 1;
+ (State::Fn(Box::new(move |t, c| raw(t, c, info))), None)
}
- _ => raw(tokenizer, code, balance),
+ _ => raw(tokenizer, code, info),
}
}
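
For context, a minimal sketch of how a calling construct might use the new `Options` (not part of this commit; the wrapper name `destination_before` and the import aliases are illustrative, and the token types shown are the ones this diff removes as hard-coded defaults):

```rust
// Hypothetical caller, e.g. inside the definition construct: the caller
// now decides which token types the destination parser emits.
use crate::construct::partial_destination::{start as destination, Options as DestinationOptions};
use crate::tokenizer::{Code, StateFnResult, TokenType, Tokenizer};

/// Before a definition destination (illustrative wrapper, not from this diff).
fn destination_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    destination(
        tokenizer,
        code,
        DestinationOptions {
            destination: TokenType::DefinitionDestination,
            literal: TokenType::DefinitionDestinationLiteral,
            marker: TokenType::DefinitionDestinationLiteralMarker,
            raw: TokenType::DefinitionDestinationRaw,
            string: TokenType::DefinitionDestinationString,
        },
    )
}
```

With this shape, other constructs (resources, labels, titles) can reuse the same destination parser while emitting their own token types instead of the definition-specific ones.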