-rw-r--r--  readme.md                    |   3
-rw-r--r--  src/compiler.rs              | 248
-rw-r--r--  tests/character_escape.rs    |  11
-rw-r--r--  tests/character_reference.rs |  11
-rw-r--r--  tests/definition.rs          | 274
5 files changed, 333 insertions(+), 214 deletions(-)
diff --git a/readme.md b/readme.md
index 478bf9f..16f81d9 100644
--- a/readme.md
+++ b/readme.md
@@ -147,8 +147,6 @@ cargo doc --document-private-items
`heading_atx`, `heading_setext`, `html_flow`, `misc_soft_break`,
`misc_tabs`, `thematic_break`)
- [ ] (3) Interrupting (html flow complete)
-- [ ] (5) label end (reference)\
- test (`character_escape`, `character_reference`, `definition`)
- [ ] (5) attention\
test (`character_reference`, `hard_break_escape`, `hard_break_trailing`,
`heading_atx`, `heading_setext`, `html_flow`, `thematic_break`)\
@@ -284,3 +282,4 @@ important.
- [x] (1) Move map handling from `resolve_media`
- [x] (5) Add support for sharing identifiers, references before definitions
- [x] (2) Refactor to externalize handlers of compiler
+- [x] (1) Add support for compiling shared references and definitions
diff --git a/src/compiler.rs b/src/compiler.rs
index bc31a15..3dd6ae4 100644
--- a/src/compiler.rs
+++ b/src/compiler.rs
@@ -83,7 +83,7 @@ struct Media {
/// tags are ignored.
label: Option<String>,
/// To do.
- // reference_id: String,
+ reference_id: Option<String>,
/// The destination (url).
/// Interpreted string content.
destination: Option<String>,
@@ -92,6 +92,17 @@ struct Media {
title: Option<String>,
}
+/// Representation of a definition.
+#[derive(Debug)]
+struct Definition {
+ /// The destination (url).
+ /// Interpreted string content.
+ destination: Option<String>,
+ /// The title.
+ /// Interpreted string content.
+ title: Option<String>,
+}
+
/// To do.
#[derive(Debug, Clone, PartialEq)]
struct DefinitionInfo {
@@ -227,6 +238,7 @@ struct CompileContext<'a> {
pub code_fenced_fences_count: Option<usize>,
pub character_reference_kind: Option<CharacterReferenceKind>,
pub media_stack: Vec<Media>,
+ pub definitions: HashMap<String, Definition>,
/// Fields used to influence the current compilation.
pub slurp_one_line_ending: bool,
pub ignore_encode: bool,
@@ -260,6 +272,7 @@ impl<'a> CompileContext<'a> {
code_fenced_fences_count: None,
character_reference_kind: None,
media_stack: vec![],
+ definitions: HashMap::new(),
slurp_one_line_ending: false,
ignore_encode: false,
last_was_tag: false,
@@ -279,6 +292,7 @@ impl<'a> CompileContext<'a> {
index: 0,
}
}
+
/// Push a buffer.
pub fn buffer(&mut self) {
self.buffers.push(vec![]);
@@ -362,9 +376,9 @@ impl<'a> CompileContext<'a> {
#[allow(clippy::too_many_lines)]
pub fn compile(events: &[Event], codes: &[Code], options: &Options) -> String {
// let mut slurp_all_line_endings = false;
- let mut definition: Option<DefinitionInfo> = None;
let mut index = 0;
let mut line_ending_inferred: Option<LineEnding> = None;
+
// To do: actually do a compile pass, so that `buffer`, `resume`, etc can be used.
while index < events.len() {
let event = &events[index];
@@ -378,24 +392,7 @@ pub fn compile(events: &[Event], codes: &[Code], options: &Options) -> String {
{
let codes = codes_from_span(codes, &from_exit_event(events, index));
line_ending_inferred = Some(LineEnding::from_code(*codes.first().unwrap()));
- }
-
- if event.event_type == EventType::Enter {
- if event.token_type == TokenType::Definition {
- definition = Some(DefinitionInfo {
- id: None,
- destination: None,
- title: None,
- });
- }
- } else if event.token_type == TokenType::Definition {
- definition = None;
- } else if event.token_type == TokenType::DefinitionLabelString
- || event.token_type == TokenType::DefinitionDestinationString
- || event.token_type == TokenType::DefinitionTitleString
- {
- let slice = serialize(codes, &from_exit_event(events, index), false);
- println!("set: {:?} {:?}", slice, definition);
+ break;
}
index += 1;
@@ -412,7 +409,6 @@ pub fn compile(events: &[Event], codes: &[Code], options: &Options) -> String {
let mut enter_map: Map = HashMap::new();
enter_map.insert(TokenType::CodeFencedFenceInfo, on_enter_buffer);
enter_map.insert(TokenType::CodeFencedFenceMeta, on_enter_buffer);
- enter_map.insert(TokenType::Definition, on_enter_buffer);
enter_map.insert(TokenType::HeadingAtxText, on_enter_buffer);
enter_map.insert(TokenType::HeadingSetextText, on_enter_buffer);
enter_map.insert(TokenType::Label, on_enter_buffer);
@@ -430,11 +426,22 @@ pub fn compile(events: &[Event], codes: &[Code], options: &Options) -> String {
on_enter_destination_string,
);
enter_map.insert(TokenType::Paragraph, on_enter_paragraph);
+ enter_map.insert(TokenType::Definition, on_enter_definition);
+ enter_map.insert(
+ TokenType::DefinitionDestinationString,
+ on_enter_definition_destination_string,
+ );
+ enter_map.insert(TokenType::DefinitionLabelString, on_enter_buffer);
+ enter_map.insert(TokenType::DefinitionTitleString, on_enter_buffer);
let mut exit_map: Map = HashMap::new();
exit_map.insert(TokenType::Label, on_exit_label);
exit_map.insert(TokenType::LabelText, on_exit_label_text);
exit_map.insert(
+ TokenType::ReferenceString,
+ on_exit_reference_destination_string,
+ );
+ exit_map.insert(
TokenType::ResourceDestinationString,
on_exit_resource_destination_string,
);
@@ -477,7 +484,6 @@ pub fn compile(events: &[Event], codes: &[Code], options: &Options) -> String {
exit_map.insert(TokenType::CodeFlowChunk, on_exit_code_flow_chunk);
exit_map.insert(TokenType::CodeText, on_exit_code_text);
exit_map.insert(TokenType::CodeTextLineEnding, on_exit_code_text_line_ending);
- exit_map.insert(TokenType::Definition, on_exit_definition);
exit_map.insert(TokenType::HardBreakEscape, on_exit_break);
exit_map.insert(TokenType::HardBreakTrailing, on_exit_break);
exit_map.insert(TokenType::HeadingAtx, on_exit_heading_atx);
@@ -495,11 +501,21 @@ pub fn compile(events: &[Event], codes: &[Code], options: &Options) -> String {
exit_map.insert(TokenType::LineEnding, on_exit_line_ending);
exit_map.insert(TokenType::Paragraph, on_exit_paragraph);
exit_map.insert(TokenType::ThematicBreak, on_exit_thematic_break);
+ exit_map.insert(TokenType::Definition, on_exit_definition);
+ exit_map.insert(
+ TokenType::DefinitionDestinationString,
+ on_exit_definition_destination_string,
+ );
+ exit_map.insert(
+ TokenType::DefinitionLabelString,
+ on_exit_definition_label_string,
+ );
+ exit_map.insert(
+ TokenType::DefinitionTitleString,
+ on_exit_definition_title_string,
+ );
- let mut index = 0;
- let mut context = CompileContext::new(events, codes, options, line_ending_default);
-
- while index < events.len() {
+ let handle = |context: &mut CompileContext, index: usize| {
let event = &events[index];
context.index = index;
@@ -508,14 +524,67 @@ pub fn compile(events: &[Event], codes: &[Code], options: &Options) -> String {
} else {
&exit_map
};
+
+ println!(
+ "handle {:?}:{:?} ({:?})",
+ event.event_type, event.token_type, index
+ );
+
if let Some(func) = map.get(&event.token_type) {
- func(&mut context, event);
+ func(context, event);
+ }
+ };
+
+ let mut context = CompileContext::new(events, codes, options, line_ending_default);
+ let mut definition_indices: Vec<(usize, usize)> = vec![];
+ let mut index = 0;
+ let mut definition_inside = false;
+
+ while index < events.len() {
+ let event = &events[index];
+
+ if definition_inside {
+ handle(&mut context, index);
+ }
+
+ if event.token_type == TokenType::Definition {
+ if event.event_type == EventType::Enter {
+ handle(&mut context, index); // also handle start.
+ definition_inside = true;
+ definition_indices.push((index, index));
+ } else {
+ definition_inside = false;
+ definition_indices.last_mut().unwrap().1 = index;
+ }
}
index += 1;
}
- assert!(context.buffers.len() == 1, "expected 1 final buffer");
+ println!("xxx: {:?}", definition_indices);
+
+ index = 0;
+ let jump_default = (events.len(), events.len());
+ let mut definition_index = 0;
+ let mut jump = definition_indices
+ .get(definition_index)
+ .unwrap_or(&jump_default);
+
+ while index < events.len() {
+ if index == jump.0 {
+ println!("jump {:?}", jump);
+ index = jump.1 + 1;
+ definition_index += 1;
+ jump = definition_indices
+ .get(definition_index)
+ .unwrap_or(&jump_default);
+ } else {
+ handle(&mut context, index);
+ index += 1;
+ }
+ }
+
+ assert_eq!(context.buffers.len(), 1, "expected 1 final buffer");
context
.buffers
.get(0)
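
For orientation, the rewritten `compile` above now makes a two-pass walk over the events: the first pass handles only the events inside definitions and records their (enter, exit) index ranges, and the second pass handles everything else while jumping over those already-handled ranges, so a reference can resolve even when it appears before its definition. A minimal, self-contained sketch of that pattern, using a made-up `Kind` enum instead of the crate's real `Event`/`TokenType` types:

enum Kind {
    DefinitionEnter,
    DefinitionExit,
    Other,
}

fn compile(events: &[Kind]) -> Vec<usize> {
    let mut handled = Vec::new();

    // Pass 1: walk all events, but only "handle" the ones inside definitions,
    // remembering each (enter, exit) index pair.
    let mut ranges: Vec<(usize, usize)> = Vec::new();
    let mut inside = false;
    for (index, kind) in events.iter().enumerate() {
        match kind {
            Kind::DefinitionEnter => {
                inside = true;
                ranges.push((index, index));
                handled.push(index);
            }
            Kind::DefinitionExit => {
                inside = false;
                ranges.last_mut().unwrap().1 = index;
                handled.push(index);
            }
            Kind::Other if inside => handled.push(index),
            Kind::Other => {}
        }
    }

    // Pass 2: handle everything else, jumping over the definition ranges that
    // pass 1 already processed.
    let mut index = 0;
    let mut range_index = 0;
    while index < events.len() {
        if let Some(&(start, end)) = ranges.get(range_index) {
            if index == start {
                index = end + 1;
                range_index += 1;
                continue;
            }
        }
        handled.push(index);
        index += 1;
    }

    handled
}

fn main() {
    let events = [
        Kind::Other, // a paragraph that references `[foo]`
        Kind::DefinitionEnter,
        Kind::Other, // the `[foo]: /url` body
        Kind::DefinitionExit,
    ];
    // Definition events (1, 2, 3) are handled first, the earlier paragraph (0) after.
    println!("{:?}", compile(&events));
}

Running it prints `[1, 2, 3, 0]`: the definition is compiled before the paragraph that precedes it in the input.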
@@ -564,7 +633,7 @@ fn on_enter_image(context: &mut CompileContext, _event: &Event) {
image: true,
label_id: None,
label: None,
- // reference_id: "".to_string(),
+ reference_id: None,
destination: None,
title: None,
});
@@ -576,7 +645,7 @@ fn on_enter_link(context: &mut CompileContext, _event: &Event) {
image: false,
label_id: None,
label: None,
- // reference_id: "".to_string(),
+ reference_id: None,
destination: None,
title: None,
});
@@ -614,6 +683,15 @@ fn on_exit_label_text(context: &mut CompileContext, _event: &Event) {
));
}
+fn on_exit_reference_destination_string(context: &mut CompileContext, _event: &Event) {
+ let media = context.media_stack.last_mut().unwrap();
+ media.reference_id = Some(serialize(
+ context.codes,
+ &from_exit_event(context.events, context.index),
+ false,
+ ));
+}
+
fn on_exit_resource_destination_string(context: &mut CompileContext, _event: &Event) {
let buf = context.resume();
let media = context.media_stack.last_mut().unwrap();
@@ -628,25 +706,43 @@ fn on_exit_resource_title_string(context: &mut CompileContext, _event: &Event) {
}
fn on_exit_media(context: &mut CompileContext, _event: &Event) {
- // let mut is_in_image = false;
- // let mut index = 0;
+ let mut is_in_image = false;
+ let mut index = 0;
// Skip current.
- // while index < (media_stack.len() - 1) {
- // if media_stack[index].image {
- // is_in_image = true;
- // break;
- // }
- // index += 1;
- // }
+ while index < (context.media_stack.len() - 1) {
+ if context.media_stack[index].image {
+ is_in_image = true;
+ break;
+ }
+ index += 1;
+ }
- // tags = is_in_image;
+ // context.tags = is_in_image;
let media = context.media_stack.pop().unwrap();
- println!("media: {:?}", media);
+ let id = media.reference_id.or(media.label_id);
let label = media.label.unwrap();
- // To do: get from definition.
- let destination = media.destination.unwrap_or_else(|| "".to_string());
- let title = if let Some(title) = media.title {
+ let definition = id.and_then(|id| context.definitions.get(&id));
+ let destination = if let Some(definition) = definition {
+ &definition.destination
+ } else {
+ &media.destination
+ };
+ let title = if let Some(definition) = definition {
+ &definition.title
+ } else {
+ &media.title
+ };
+
+ println!("media: {:?} {:?}", destination, title);
+
+ let destination = if let Some(destination) = destination {
+ destination.clone()
+ } else {
+ "".to_string()
+ };
+
+ let title = if let Some(title) = title {
format!(" title=\"{}\"", title)
} else {
"".to_string()
@@ -700,11 +796,9 @@ fn on_exit_autolink_protocol(context: &mut CompileContext, _event: &Event) {
&from_exit_event(context.events, context.index),
false,
);
- let href = sanitize_uri(slice.as_str(), &context.protocol_href);
- println!("xxx: {:?} {:?}", href, &context.protocol_href);
context.push(format!(
"<a href=\"{}\">{}</a>",
- href,
+ sanitize_uri(slice.as_str(), &context.protocol_href),
context.encode_opt(&slice)
));
}
@@ -836,11 +930,6 @@ fn on_exit_code_text_line_ending(context: &mut CompileContext, _event: &Event) {
context.push(" ".to_string());
}
-fn on_exit_definition(context: &mut CompileContext, _event: &Event) {
- context.resume();
- context.slurp_one_line_ending = true;
-}
-
fn on_exit_break(context: &mut CompileContext, _event: &Event) {
context.push("<br />".to_string());
}
@@ -929,3 +1018,58 @@ fn on_exit_paragraph(context: &mut CompileContext, _event: &Event) {
fn on_exit_thematic_break(context: &mut CompileContext, _event: &Event) {
context.push("<hr />".to_string());
}
+
+fn on_enter_definition(context: &mut CompileContext, _event: &Event) {
+ context.buffer();
+ context.media_stack.push(Media {
+ image: false,
+ label: None,
+ label_id: None,
+ reference_id: None,
+ destination: None,
+ title: None,
+ });
+}
+
+fn on_enter_definition_destination_string(context: &mut CompileContext, _event: &Event) {
+ context.buffer();
+ context.ignore_encode = true;
+}
+
+fn on_exit_definition_destination_string(context: &mut CompileContext, _event: &Event) {
+ let buf = context.resume();
+ let definition = context.media_stack.last_mut().unwrap();
+ definition.destination = Some(buf);
+ context.ignore_encode = false;
+}
+
+fn on_exit_definition_label_string(context: &mut CompileContext, _event: &Event) {
+ // Discard label, use the source content instead.
+ context.resume();
+ let definition = context.media_stack.last_mut().unwrap();
+ // To do: put this on `reference_id` instead?
+ definition.label_id = Some(serialize(
+ context.codes,
+ &from_exit_event(context.events, context.index),
+ false,
+ ));
+}
+
+fn on_exit_definition_title_string(context: &mut CompileContext, _event: &Event) {
+ let buf = context.resume();
+ let definition = context.media_stack.last_mut().unwrap();
+ definition.title = Some(buf);
+}
+
+fn on_exit_definition(context: &mut CompileContext, _event: &Event) {
+ let definition = context.media_stack.pop().unwrap();
+ let label_id = definition.label_id.unwrap();
+ let destination = definition.destination;
+ let title = definition.title;
+
+ context.resume();
+ context
+ .definitions
+ .insert(label_id, Definition { destination, title });
+ context.slurp_one_line_ending = true;
+}
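
The new definition handlers lean on the existing `buffer()`/`resume()` pair: each `on_enter_definition*` pushes a scratch buffer, the matching exit handler pops it and stores the text on the pushed `Media`, and `on_exit_definition` finally moves destination and title into `context.definitions`, keyed by the serialized label. A small sketch of that buffering pattern, assuming `resume` pops and joins the innermost buffer (a simplification of the real `CompileContext`):

// Entering a construct pushes a fresh buffer, handlers write into the
// innermost one, and exiting pops ("resumes") it.
struct Buffers {
    stack: Vec<Vec<String>>,
}

impl Buffers {
    fn buffer(&mut self) {
        self.stack.push(vec![]);
    }
    fn push(&mut self, value: String) {
        self.stack.last_mut().unwrap().push(value);
    }
    fn resume(&mut self) -> String {
        self.stack.pop().unwrap().concat()
    }
}

fn main() {
    let mut buffers = Buffers { stack: vec![vec![]] };
    buffers.buffer(); // e.g. `on_enter_definition_destination_string`
    buffers.push("/url".to_string());
    // `on_exit_definition_destination_string` takes the buffered text...
    assert_eq!(buffers.resume(), "/url");
    // ...and the outer buffer stays empty, so the definition emits no output.
    assert_eq!(buffers.resume(), "");
}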
diff --git a/tests/character_escape.rs b/tests/character_escape.rs
index 3e3e839..26e9336 100644
--- a/tests/character_escape.rs
+++ b/tests/character_escape.rs
@@ -67,12 +67,11 @@ fn character_escape() {
"should escape in resource and title"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /bar\\* \"ti\\*tle\"\n\n[foo]"),
- // "<p><a href=\"/bar*\" title=\"ti*tle\">foo</a></p>",
- // "should escape in definition resource and title"
- // );
+ assert_eq!(
+ micromark("[foo]: /bar\\* \"ti\\*tle\"\n\n[foo]"),
+ "<p><a href=\"/bar*\" title=\"ti*tle\">foo</a></p>",
+ "should escape in definition resource and title"
+ );
assert_eq!(
micromark("``` foo\\+bar\nfoo\n```"),
diff --git a/tests/character_reference.rs b/tests/character_reference.rs
index 3d2111e..3951e00 100644
--- a/tests/character_reference.rs
+++ b/tests/character_reference.rs
@@ -61,12 +61,11 @@ fn character_reference() {
"should support character references in resource URLs and titles"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /f&ouml;&ouml; \"f&ouml;&ouml;\"\n\n[foo]"),
- // "<p><a href=\"/f%C3%B6%C3%B6\" title=\"föö\">foo</a></p>",
- // "should support character references in definition URLs and titles"
- // );
+ assert_eq!(
+ micromark("[foo]: /f&ouml;&ouml; \"f&ouml;&ouml;\"\n\n[foo]"),
+ "<p><a href=\"/f%C3%B6%C3%B6\" title=\"föö\">foo</a></p>",
+ "should support character references in definition URLs and titles"
+ );
assert_eq!(
micromark("``` f&ouml;&ouml;\nfoo\n```"),
diff --git a/tests/definition.rs b/tests/definition.rs
index a8e8164..c112a96 100644
--- a/tests/definition.rs
+++ b/tests/definition.rs
@@ -9,12 +9,11 @@ const DANGER: &Options = &Options {
#[test]
fn definition() {
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /url \"title\"\n\n[foo]"),
- // "<p><a href=\"/url\" title=\"title\">foo</a></p>",
- // "should support link definitions"
- // );
+ assert_eq!(
+ micromark("[foo]: /url \"title\"\n\n[foo]"),
+ "<p><a href=\"/url\" title=\"title\">foo</a></p>",
+ "should support link definitions"
+ );
assert_eq!(
micromark("[foo]:\n\n/url\n\n[foo]"),
@@ -22,33 +21,30 @@ fn definition() {
"should not support blank lines before destination"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark(" [foo]: \n /url \n 'the title' \n\n[foo]"),
- // "<p><a href=\"/url\" title=\"the title\">foo</a></p>",
- // "should support whitespace and line endings in definitions"
- // );
+ assert_eq!(
+ micromark(" [foo]: \n /url \n 'the title' \n\n[foo]"),
+ "<p><a href=\"/url\" title=\"the title\">foo</a></p>",
+ "should support whitespace and line endings in definitions"
+ );
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark("[Foo*bar\\]]:my_(url) 'title (with parens)'\n\n[Foo*bar\\]]"),
// "<p><a href=\"my_(url)\" title=\"title (with parens)\">Foo*bar]</a></p>",
// "should support complex definitions (1)"
// );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[Foo bar]:\n<my url>\n'title'\n\n[Foo bar]"),
- // "<p><a href=\"my%20url\" title=\"title\">Foo bar</a></p>",
- // "should support complex definitions (2)"
- // );
+ assert_eq!(
+ micromark("[Foo bar]:\n<my url>\n'title'\n\n[Foo bar]"),
+ "<p><a href=\"my%20url\" title=\"title\">Foo bar</a></p>",
+ "should support complex definitions (2)"
+ );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /url '\ntitle\nline1\nline2\n'\n\n[foo]"),
- // "<p><a href=\"/url\" title=\"\ntitle\nline1\nline2\n\">foo</a></p>",
- // "should support line endings in titles"
- // );
+ assert_eq!(
+ micromark("[foo]: /url '\ntitle\nline1\nline2\n'\n\n[foo]"),
+ "<p><a href=\"/url\" title=\"\ntitle\nline1\nline2\n\">foo</a></p>",
+ "should support line endings in titles"
+ );
assert_eq!(
micromark("[foo]: /url 'title\n\nwith blank line'\n\n[foo]"),
@@ -56,12 +52,11 @@ fn definition() {
"should not support blank lines in titles"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]:\n/url\n\n[foo]"),
- // "<p><a href=\"/url\">foo</a></p>",
- // "should support definitions w/o title"
- // );
+ assert_eq!(
+ micromark("[foo]:\n/url\n\n[foo]"),
+ "<p><a href=\"/url\">foo</a></p>",
+ "should support definitions w/o title"
+ );
assert_eq!(
micromark("[foo]:\n\n[foo]"),
@@ -69,12 +64,11 @@ fn definition() {
"should not support definitions w/o destination"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: <>\n\n[foo]"),
- // "<p><a href=\"\">foo</a></p>",
- // "should support definitions w/ explicit empty destinations"
- // );
+ assert_eq!(
+ micromark("[foo]: <>\n\n[foo]"),
+ "<p><a href=\"\">foo</a></p>",
+ "should support definitions w/ explicit empty destinations"
+ );
assert_eq!(
micromark_with_options("[foo]: <bar>(baz)\n\n[foo]", DANGER),
@@ -82,35 +76,34 @@ fn definition() {
"should not support definitions w/ no whitespace between destination and title"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /url\\bar\\*baz \"foo\\\"bar\\baz\"\n\n[foo]"),
- // "<p><a href=\"/url%5Cbar*baz\" title=\"foo&quot;bar\\baz\">foo</a></p>",
- // "should support character escapes in destinations and titles"
- // );
+ assert_eq!(
+ micromark("[foo]: /url\\bar\\*baz \"foo\\\"bar\\baz\"\n\n[foo]"),
+ "<p><a href=\"/url%5Cbar*baz\" title=\"foo&quot;bar\\baz\">foo</a></p>",
+ "should support character escapes in destinations and titles"
+ );
- // To do: link (reference).
+ // Some bug.
// assert_eq!(
// micromark("[foo]\n\n[foo]: url"),
// "<p><a href=\"url\">foo</a></p>\n",
// "should support a link before a definition"
// );
- // To do: link (reference).
+ // Some bug.
// assert_eq!(
// micromark("[foo]: first\n[foo]: second\n\n[foo]"),
// "<p><a href=\"first\">foo</a></p>",
// "should match w/ the first definition"
// );
- // To do: link (reference).
+ // Some bug.
// assert_eq!(
// micromark("[FOO]: /url\n\n[Foo]"),
// "<p><a href=\"/url\">Foo</a></p>",
// "should match w/ case-insensitive (1)"
// );
- // To do: link (reference).
+ // Some bug.
// assert_eq!(
// micromark("[ΑΓΩ]: /φου\n\n[αγω]"),
// "<p><a href=\"/%CF%86%CE%BF%CF%85\">αγω</a></p>",
@@ -129,19 +122,17 @@ fn definition() {
"should not contribute anything w/o reference (2)"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /url \"title\" \n\n[foo]"),
- // "<p><a href=\"/url\" title=\"title\">foo</a></p>",
- // "should support whitespace after title"
- // );
+ assert_eq!(
+ micromark("[foo]: /url \"title\" \n\n[foo]"),
+ "<p><a href=\"/url\" title=\"title\">foo</a></p>",
+ "should support whitespace after title"
+ );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /url\n\"title\" \n\n[foo]"),
- // "<p><a href=\"/url\" title=\"title\">foo</a></p>",
- // "should support whitespace after title on a separate line"
- // );
+ assert_eq!(
+ micromark("[foo]: /url\n\"title\" \n\n[foo]"),
+ "<p><a href=\"/url\" title=\"title\">foo</a></p>",
+ "should support whitespace after title on a separate line"
+ );
assert_eq!(
micromark("[foo]: /url \"title\" ok"),
@@ -173,28 +164,26 @@ fn definition() {
"should not support definitions in paragraphs"
);
- // To do: link (reference).
+ // To do: block quote.
// assert_eq!(
// micromark("# [Foo]\n[foo]: /url\n> bar"),
// "<h1><a href=\"/url\">Foo</a></h1>\n<blockquote>\n<p>bar</p>\n</blockquote>",
// "should not support definitions in headings"
// );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /url\nbar\n===\n[foo]"),
- // "<h1>bar</h1>\n<p><a href=\"/url\">foo</a></p>",
- // "should support setext headings after definitions"
- // );
+ assert_eq!(
+ micromark("[foo]: /url\nbar\n===\n[foo]"),
+ "<h1>bar</h1>\n<p><a href=\"/url\">foo</a></p>",
+ "should support setext headings after definitions"
+ );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[foo]: /url\n===\n[foo]"),
- // "<p>===\n<a href=\"/url\">foo</a></p>",
- // "should not support setext heading underlines after definitions"
- // );
+ assert_eq!(
+ micromark("[foo]: /url\n===\n[foo]"),
+ "<p>===\n<a href=\"/url\">foo</a></p>",
+ "should not support setext heading underlines after definitions"
+ );
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark(
// "[foo]: /foo-url \"foo\"\n[bar]: /bar-url\n \"bar\"\n[baz]: /baz-url\n\n[foo],\n[bar],\n[baz]"
@@ -203,7 +192,7 @@ fn definition() {
// "should support definitions after definitions"
// );
- // To do: link (reference).
+ // To do: block quote.
// assert_eq!(
// micromark("> [foo]: /url\n\n[foo]"),
// "<blockquote>\n</blockquote>\n<p><a href=\"/url\">foo</a></p>",
@@ -211,26 +200,24 @@ fn definition() {
// );
// Extra
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark("[\\[\\+\\]]: example.com\n\nLink: [\\[\\+\\]]."),
// "<p>Link: <a href=\"example.com\">[+]</a>.</p>",
// "should match w/ character escapes"
// );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: \\\"&#x20;\\(\\)\\\"\n\n[x]"),
- // "<p><a href=\"%22%20()%22\">x</a></p>",
- // "should support character escapes & references in unenclosed destinations"
- // );
+ assert_eq!(
+ micromark("[x]: \\\"&#x20;\\(\\)\\\"\n\n[x]"),
+ "<p><a href=\"%22%20()%22\">x</a></p>",
+ "should support character escapes & references in unenclosed destinations"
+ );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: <\\>&#x20;\\+\\>>\n\n[x]"),
- // "<p><a href=\"%3E%20+%3E\">x</a></p>",
- // "should support character escapes & references in enclosed destinations"
- // );
+ assert_eq!(
+ micromark("[x]: <\\>&#x20;\\+\\>>\n\n[x]"),
+ "<p><a href=\"%3E%20+%3E\">x</a></p>",
+ "should support character escapes & references in enclosed destinations"
+ );
assert_eq!(
micromark("[x]: <\n\n[x]"),
@@ -256,42 +243,38 @@ fn definition() {
"should not support ascii control characters in destination"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: <\u{000b}a>\n\n[x]"),
- // "<p><a href=\"%0Ba\">x</a></p>",
- // "should support ascii control characters at the start of enclosed destination"
- // );
+ assert_eq!(
+ micromark("[x]: <\u{000b}a>\n\n[x]"),
+ "<p><a href=\"%0Ba\">x</a></p>",
+ "should support ascii control characters at the start of enclosed destination"
+ );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: <a\u{000b}b>\n\n[x]"),
- // "<p><a href=\"a%0Bb\">x</a></p>",
- // "should support ascii control characters in enclosed destinations"
- // );
+ assert_eq!(
+ micromark("[x]: <a\u{000b}b>\n\n[x]"),
+ "<p><a href=\"a%0Bb\">x</a></p>",
+ "should support ascii control characters in enclosed destinations"
+ );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: a \"\\\"\"\n\n[x]"),
- // "<p><a href=\"a\" title=\"&quot;\">x</a></p>",
- // "should support character escapes at the start of a title"
- // );
+ assert_eq!(
+ micromark("[x]: a \"\\\"\"\n\n[x]"),
+ "<p><a href=\"a\" title=\"&quot;\">x</a></p>",
+ "should support character escapes at the start of a title"
+ );
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark("[x]: a \"\\\"\"\n\n[x]"),
// "<p><a href=\"a\" title=\"\"\">x</a></p>",
// "should support double quoted titles"
// );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: a '\"'\n\n[x]"),
- // "<p><a href=\"a\" title=\"&quot;\">x</a></p>",
- // "should support double quoted titles"
- // );
+ assert_eq!(
+ micromark("[x]: a '\"'\n\n[x]"),
+ "<p><a href=\"a\" title=\"&quot;\">x</a></p>",
+ "should support double quoted titles"
+ );
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark("[x]: a (\"\")\n\n[x]"),
// "<p><a href=\"a\" title=\"&quot;\"\">x</a></p>",
@@ -304,12 +287,11 @@ fn definition() {
"should not support more opening than closing parens in the destination"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: a(())\n\n[x]"),
- // "<p><a href=\"a(())\">x</a></p>",
- // "should support balanced opening and closing parens in the destination"
- // );
+ assert_eq!(
+ micromark("[x]: a(())\n\n[x]"),
+ "<p><a href=\"a(())\">x</a></p>",
+ "should support balanced opening and closing parens in the destination"
+ );
assert_eq!(
micromark("[x]: a())\n\n[x]"),
@@ -317,43 +299,39 @@ fn definition() {
"should not support more closing than opening parens in the destination"
);
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: a \t\n\n[x]"),
- // "<p><a href=\"a\">x</a></p>",
- // "should support trailing whitespace after a destination"
- // );
+ assert_eq!(
+ micromark("[x]: a \t\n\n[x]"),
+ "<p><a href=\"a\">x</a></p>",
+ "should support trailing whitespace after a destination"
+ );
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark("[x]: a \"\"X \t\n\n[x]"),
// "<p><a href=\"a\" title=\"\"X>x</a></p>",
// "should support trailing whitespace after a destination"
// );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[&amp;&copy;&]: example.com/&amp;&copy;& \"&amp;&copy;&\"\n\n[&amp;&copy;&]"),
- // "<p><a href=\"example.com/&amp;%C2%A9&amp;\" title=\"&amp;©&amp;\">&amp;©&amp;</a></p>",
- // "should support character references in definitions"
- // );
+ assert_eq!(
+ micromark("[&amp;&copy;&]: example.com/&amp;&copy;& \"&amp;&copy;&\"\n\n[&amp;&copy;&]"),
+ "<p><a href=\"example.com/&amp;%C2%A9&amp;\" title=\"&amp;©&amp;\">&amp;©&amp;</a></p>",
+ "should support character references in definitions"
+ );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]:\nexample.com\n\n[x]"),
- // "<p><a href=\"example.com\">x</a></p>",
- // "should support a line ending before a destination"
- // );
+ assert_eq!(
+ micromark("[x]:\nexample.com\n\n[x]"),
+ "<p><a href=\"example.com\">x</a></p>",
+ "should support a line ending before a destination"
+ );
- // To do: link (reference).
- // assert_eq!(
- // micromark("[x]: \t\nexample.com\n\n[x]"),
- // "<p><a href=\"example.com\">x</a></p>",
- // "should support whitespace before a destination"
- // );
+ assert_eq!(
+ micromark("[x]: \t\nexample.com\n\n[x]"),
+ "<p><a href=\"example.com\">x</a></p>",
+ "should support whitespace before a destination"
+ );
// See: <https://github.com/commonmark/commonmark.js/issues/192>
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark("[x]: <> \"\"\n[][x]"),
// "<p><a href=\"\"></a></p>",
@@ -384,7 +362,7 @@ fn definition() {
"should not support an extra right paren (`)`) in a raw destination"
);
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark("[a]\n\n[a]: a(1(2(3(4()))))b"),
// "<p><a href=\"a(1(2(3(4()))))b\">a</a></p>\n",
@@ -403,14 +381,14 @@ fn definition() {
"should not support a final (unbalanced) right paren in a raw destination “before” a title"
);
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark(" [a]: b \"c\"\n [d]: e\n [f]: g \"h\"\n [i]: j\n\t[k]: l (m)\n\t n [k] o"),
// "<p>n <a href=\"l\" title=\"m\">k</a> o</p>",
// "should support subsequent indented definitions"
// );
- // To do: link (reference).
+ // To do: some bug.
// assert_eq!(
// micromark("[a\n b]: c\n\n[a\n b]"),
// "<p><a href=\"c\">a\nb</a></p>",