about | summary | refs | log | tree | commit | diff | stats
path: root/src/construct/partial_label.rs
diff options
context:
space:
mode:
author: Titus Wormer <tituswormer@gmail.com>  2022-06-21 18:06:47 +0200
committer: Titus Wormer <tituswormer@gmail.com>  2022-06-21 18:06:47 +0200
commitd1063268e62633a334bb9464f7cba2385da5812d (patch)
tree76035cdf260046cd2afe1841c0351b311ea66d99 /src/construct/partial_label.rs
parent56ff5c73c7ec19b349e7d60d04ce1057c006d6ec (diff)
downloadmarkdown-rs-d1063268e62633a334bb9464f7cba2385da5812d.tar.gz
markdown-rs-d1063268e62633a334bb9464f7cba2385da5812d.tar.bz2
markdown-rs-d1063268e62633a334bb9464f7cba2385da5812d.zip
Add support for passing token types to destination, label, title
Diffstat (limited to 'src/construct/partial_label.rs')
-rw-r--r--  src/construct/partial_label.rs  44
1 file changed, 29 insertions(+), 15 deletions(-)
diff --git a/src/construct/partial_label.rs b/src/construct/partial_label.rs
index 194165c..c831eaf 100644
--- a/src/construct/partial_label.rs
+++ b/src/construct/partial_label.rs
@@ -60,6 +60,19 @@ use crate::construct::partial_space_or_tab::space_or_tab_opt;
use crate::tokenizer::{Code, State, StateFnResult, TokenType, Tokenizer};
use crate::util::link::link;
+/// Configuration.
+///
+/// You must pass the token types in that are used.
+#[derive(Debug)]
+pub struct Options {
+ /// Token for the whole label.
+ pub label: TokenType,
+ /// Token for the markers.
+ pub marker: TokenType,
+ /// Token for the string (inside the markers).
+ pub string: TokenType,
+}
+
/// State needed to parse labels.
#[derive(Debug)]
struct Info {
@@ -69,6 +82,8 @@ struct Info {
data: bool,
/// Number of characters in the label.
size: usize,
+ /// Configuration.
+ options: Options,
}
/// Before a label.
@@ -76,19 +91,20 @@ struct Info {
/// ```markdown
/// |[a]
/// ```
-pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+pub fn start(tokenizer: &mut Tokenizer, code: Code, options: Options) -> StateFnResult {
match code {
Code::Char('[') => {
- tokenizer.enter(TokenType::DefinitionLabel);
- tokenizer.enter(TokenType::DefinitionLabelMarker);
- tokenizer.consume(code);
- tokenizer.exit(TokenType::DefinitionLabelMarker);
- tokenizer.enter(TokenType::DefinitionLabelData);
let info = Info {
connect: false,
data: false,
size: 0,
+ options,
};
+ tokenizer.enter(info.options.label.clone());
+ tokenizer.enter(info.options.marker.clone());
+ tokenizer.consume(code);
+ tokenizer.exit(info.options.marker.clone());
+ tokenizer.enter(info.options.string.clone());
(State::Fn(Box::new(|t, c| at_break(t, c, info))), None)
}
_ => (State::Nok, None),
@@ -101,17 +117,17 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// [|a]
/// [a|]
/// ```
-fn at_break(tokenizer: &mut Tokenizer, code: Code, info: Info) -> StateFnResult {
+fn at_break(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> StateFnResult {
match code {
Code::None | Code::Char('[') => (State::Nok, None),
Code::Char(']') if !info.data => (State::Nok, None),
_ if info.size > LINK_REFERENCE_SIZE_MAX => (State::Nok, None),
Code::Char(']') => {
- tokenizer.exit(TokenType::DefinitionLabelData);
- tokenizer.enter(TokenType::DefinitionLabelMarker);
+ tokenizer.exit(info.options.string.clone());
+ tokenizer.enter(info.options.marker.clone());
tokenizer.consume(code);
- tokenizer.exit(TokenType::DefinitionLabelMarker);
- tokenizer.exit(TokenType::DefinitionLabel);
+ tokenizer.exit(info.options.marker.clone());
+ tokenizer.exit(info.options.label);
(State::Ok, None)
}
_ => {
@@ -120,6 +136,8 @@ fn at_break(tokenizer: &mut Tokenizer, code: Code, info: Info) -> StateFnResult
if info.connect {
let index = tokenizer.events.len() - 1;
link(&mut tokenizer.events, index);
+ } else {
+ info.connect = true;
}
label(tokenizer, code, info)
@@ -157,10 +175,6 @@ fn line_begin(tokenizer: &mut Tokenizer, code: Code, info: Info) -> StateFnResul
/// [a|b]
/// ```
fn label(tokenizer: &mut Tokenizer, code: Code, mut info: Info) -> StateFnResult {
- if !info.connect {
- info.connect = true;
- }
-
match code {
Code::None | Code::Char('[' | ']') => {
tokenizer.exit(TokenType::ChunkString);