author     Titus Wormer <tituswormer@gmail.com>  2022-06-13 14:50:48 +0200
committer  Titus Wormer <tituswormer@gmail.com>  2022-06-13 14:56:28 +0200
commit     4277dac07db06f24ba30a75b4c1dec542e32dae8 (patch)
tree       bcad7ace156eecf64fd8d0fb03a4b091bac2f845
parent     efdf90959f78d1582da312bffbefaabb79f264b7 (diff)
Add support for sanitizing urls
* Add support for properly encoding characters in urls
* Add support for sanitizing potentially dangerous urls
* Add safe defaults, optionally live dangerously
-rw-r--r--  examples/lib.rs             3
-rw-r--r--  readme.md                   4
-rw-r--r--  src/compiler.rs            34
-rw-r--r--  src/construct/autolink.rs  10
-rw-r--r--  src/lib.rs                  1
-rw-r--r--  src/util.rs               100
-rw-r--r--  tests/autolink.rs          63
-rw-r--r--  tests/html_flow.rs          1
8 files changed, 173 insertions, 43 deletions
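
A minimal sketch of how a caller sees this change, mirroring the new assertions in tests/autolink.rs further down; it assumes the crate is consumed as `micromark` and uses only the two options shown in this commit:

extern crate micromark;
use micromark::{micromark, micromark_with_options, CompileOptions};

fn main() {
    // Safe default: the unknown `a+b+c:` protocol is dropped, leaving an empty href.
    assert_eq!(micromark("<a+b+c:d>"), "<p><a href=\"\">a+b+c:d</a></p>");

    // Opting in to dangerous protocols keeps the URL (still character-encoded).
    assert_eq!(
        micromark_with_options(
            "<a+b+c:d>",
            &CompileOptions {
                allow_dangerous_html: true,
                allow_dangerous_protocol: true,
            }
        ),
        "<p><a href=\"a+b+c:d\">a+b+c:d</a></p>"
    );
}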
diff --git a/examples/lib.rs b/examples/lib.rs
index 4d01161..00f45dc 100644
--- a/examples/lib.rs
+++ b/examples/lib.rs
@@ -15,7 +15,8 @@ fn main() {
         micromark_with_options(
             "<div style=\"color: tomato\">\n\n# Hello, tomato!\n\n</div>",
             &CompileOptions {
-                allow_dangerous_html: true
+                allow_dangerous_html: true,
+                allow_dangerous_protocol: true
             }
         )
     );
diff --git a/readme.md b/readme.md
index 26035c4..829d132 100644
--- a/readme.md
+++ b/readme.md
@@ -68,10 +68,8 @@ cargo doc --document-private-items
### Small things
-- [ ] (3) Encode urls
- [ ] (1) Parse initial and final whitespace of paragraphs (in text)
- [ ] (3) Clean compiler
-- [ ] (1) Optionally remove dangerous protocols when compiling
- [ ] (1) Use preferred line ending style in markdown
- [ ] (1) Handle BOM at start
- [ ] (1) Make sure tabs are handled properly and that positional info is perfect
@@ -155,6 +153,8 @@ cargo doc --document-private-items
- [x] (8) Subtokenization: figure out a good, fast way to deal with constructs in
one content type that also are another content type
+- [x] (3) Encode urls
+- [x] (1) Optionally remove dangerous protocols when compiling
### Extensions
diff --git a/src/compiler.rs b/src/compiler.rs
index df26f1b..c451887 100644
--- a/src/compiler.rs
+++ b/src/compiler.rs
@@ -3,7 +3,7 @@ use crate::construct::character_reference::Kind as CharacterReferenceKind;
 use crate::tokenizer::{Code, Event, EventType, TokenType};
 use crate::util::{
     decode_named_character_reference, decode_numeric_character_reference, encode, get_span,
-    slice_serialize,
+    sanitize_uri, slice_serialize,
 };

 /// Configuration (optional).
@@ -13,6 +13,11 @@ pub struct CompileOptions {
     /// The default is `false`, you can turn it on to `true` for trusted
     /// content.
     pub allow_dangerous_html: bool,
+
+    /// Whether to allow (dangerous) protocols in links and images.
+    /// The default is `false`, you can turn it on to `true` for trusted
+    /// content.
+    pub allow_dangerous_protocol: bool,
 }

 /// Turn events and codes into a string of HTML.
@@ -28,6 +33,17 @@ pub fn compile(events: &[Event], codes: &[Code], options: &CompileOptions) -> St
     let mut slurp_one_line_ending = false;
     let mut ignore_encode = false;
     let mut character_reference_kind: Option<CharacterReferenceKind> = None;
+    let protocol_href = if options.allow_dangerous_protocol {
+        None
+    } else {
+        Some(vec!["http", "https", "irc", "ircs", "mailto", "xmpp"])
+    };
+    // let protocol_src = if options.allow_dangerous_protocol {
+    //     None
+    // } else {
+    //     Some(vec!["http", "https"])
+    // };
+
     // let mut slurp_all_line_endings = false;

     println!("events: {:#?}", events);
@@ -238,20 +254,20 @@ pub fn compile(events: &[Event], codes: &[Code], options: &CompileOptions) -> St
             TokenType::AutolinkProtocol => {
                 let slice = slice_serialize(codes, &get_span(events, index), false);
                 let buf = buf_tail_mut(buffers);
-                // To do: options.allowDangerousProtocol ? undefined : protocolHref
-                // let url = sanitize_uri(slice);
-                let url = encode(&slice);
-                buf.push(format!("<a href=\"{}\">", url));
+                buf.push(format!(
+                    "<a href=\"{}\">",
+                    sanitize_uri(slice.as_str(), &protocol_href)
+                ));
                 buf.push(encode(&slice));
                 buf.push("</a>".to_string());
             }
             TokenType::AutolinkEmail => {
                 let slice = slice_serialize(codes, &get_span(events, index), false);
                 let buf = buf_tail_mut(buffers);
-                // To do: options.allowDangerousProtocol ? undefined : protocolHref
-                // let url = sanitize_uri(slice);
-                let url = encode(&slice);
-                buf.push(format!("<a href=\"mailto:{}\">", url));
+                buf.push(format!(
+                    "<a href=\"mailto:{}\">",
+                    sanitize_uri(slice.as_str(), &protocol_href)
+                ));
                 buf.push(encode(&slice));
                 buf.push("</a>".to_string());
             }
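
The compiler picks the allow-list once, up front: `None` means no filtering (dangerous protocols pass through), `Some(list)` restricts `a[href]` to the listed protocols; the commented-out `protocol_src` list for `img[src]` is not wired up yet. A standalone sketch of that selection, with a hypothetical function name (the crate keeps this inline):

// Hypothetical helper mirroring the `protocol_href` selection above.
fn protocol_allow_list(allow_dangerous_protocol: bool) -> Option<Vec<&'static str>> {
    if allow_dangerous_protocol {
        // No restriction: every protocol is passed through.
        None
    } else {
        // Only these (lowercase) protocols are kept in `a[href]`.
        Some(vec!["http", "https", "irc", "ircs", "mailto", "xmpp"])
    }
}

fn main() {
    assert_eq!(protocol_allow_list(true), None);
    assert!(protocol_allow_list(false).unwrap().contains(&"mailto"));
}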
diff --git a/src/construct/autolink.rs b/src/construct/autolink.rs
index 24f2c20..c190d40 100644
--- a/src/construct/autolink.rs
+++ b/src/construct/autolink.rs
@@ -41,12 +41,12 @@
//! Interestingly, there are a couple of things that are valid autolinks in
//! markdown but in HTML would be valid tags, such as `<svg:rect>` and
//! `<xml:lang/>`.
-//! However, because CommonMark employs a naïve HTML parsing algorithm, those
+//! However, because `CommonMark` employs a naïve HTML parsing algorithm, those
//! are not considered HTML.
//!
-//! While CommonMark restricts links from occurring in other links in the case
-//! of bracketed links, this restriction is not in place for autolinks inside
-//! autolinks:
+//! While `CommonMark` restricts links from occurring in other links in the
+//! case of bracketed links, this restriction is not in place for autolinks
+//! inside autolinks:
//!
//! ```markdown
//! [<https://example.com>](#)
@@ -74,8 +74,6 @@
//! [autolink_scheme_size_max]: crate::constant::AUTOLINK_SCHEME_SIZE_MAX
//! [autolink_domain_size_max]: crate::constant::AUTOLINK_DOMAIN_SIZE_MAX
//! [html-a]: https://html.spec.whatwg.org/multipage/text-level-semantics.html#the-a-element
-//!
-//! <!-- To do: link to `encode` -->
use crate::constant::{AUTOLINK_DOMAIN_SIZE_MAX, AUTOLINK_SCHEME_SIZE_MAX};
use crate::tokenizer::{Code, State, StateFnResult, TokenType, Tokenizer};
diff --git a/src/lib.rs b/src/lib.rs
index cf0b05b..906cd4b 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -42,6 +42,7 @@ pub fn micromark(value: &str) -> String {
 ///
 /// let result = micromark_with_options("<div>\n\n# Hello, world!\n\n</div>", &CompileOptions {
 ///     allow_dangerous_html: true,
+///     allow_dangerous_protocol: true,
 /// });
 ///
 /// assert_eq!(result, "<div>\n<h1>Hello, world!</h1>\n</div>");
diff --git a/src/util.rs b/src/util.rs
index 5a916cd..accc48e 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -31,6 +31,106 @@ pub fn encode(value: &str) -> String {
.replace('>', "&gt;")
}
+/// Make a value safe for injection as a URL.
+///
+/// This encodes unsafe characters with percent-encoding and skips already
+/// encoded sequences (see `normalize_uri` below).
+/// Further unsafe characters are encoded as character references (see
+/// `encode`).
+///
+/// Then, a vec of (lowercase) allowed protocols can be given, in which case
+/// the URL is sanitized.
+///
+/// For example, `Some(vec!["http", "https", "irc", "ircs", "mailto", "xmpp"])`
+/// can be used for `a[href]`, or `Some(vec!["http", "https"])` for `img[src]`.
+/// If the URL includes an unknown protocol (one not in `protocols`, such as
+/// the dangerous `javascript:`), the value is ignored.
+pub fn sanitize_uri(value: &str, protocols: &Option<Vec<&str>>) -> String {
+    let value = encode(&normalize_uri(value));
+
+    if let Some(protocols) = protocols {
+        let chars: Vec<char> = value.chars().collect();
+        let mut index = 0;
+        let mut colon: Option<usize> = None;
+
+        while index < chars.len() {
+            let char = chars[index];
+
+            match char {
+                ':' => {
+                    colon = Some(index);
+                    break;
+                }
+                '?' | '#' | '/' => break,
+                _ => {}
+            }
+
+            index += 1;
+        }
+
+        // If there is no protocol, or the first colon is after `?`, `#`, or `/`, it’s relative.
+        // Otherwise there is a protocol, and it must be in the allowed list.
+        if let Some(colon) = colon {
+            let protocol = chars[0..colon].iter().collect::<String>().to_lowercase();
+            if !protocols.contains(&protocol.as_str()) {
+                return "".to_string();
+            }
+        }
+    }
+
+    value
+}
+
+/// Normalize a URL (such as used in definitions).
+///
+/// Encode unsafe characters with percent-encoding, skipping already encoded
+/// sequences.
+fn normalize_uri(value: &str) -> String {
+    let chars: Vec<char> = value.chars().collect();
+    let mut result: Vec<String> = vec![];
+    let mut index = 0;
+    let mut start = 0;
+    let mut buff = [0; 4];
+
+    while index < chars.len() {
+        let char = chars[index];
+
+        // A correct percent encoded value.
+        if char == '%'
+            && index + 2 < chars.len()
+            && chars[index + 1].is_ascii_alphanumeric()
+            && chars[index + 2].is_ascii_alphanumeric()
+        {
+            index += 3;
+            continue;
+        }
+
+        // Note: Rust already takes care of lone astral surrogates.
+        // Non-ascii or not allowed ascii.
+        if char >= '\u{0080}'
+            || !matches!(char, '!' | '#' | '$' | '&'..=';' | '=' | '?'..='Z' | '_' | 'a'..='z' | '~')
+        {
+            result.push(chars[start..index].iter().collect::<String>());
+
+            char.encode_utf8(&mut buff);
+            result.push(
+                buff[0..char.len_utf8()]
+                    .iter()
+                    .map(|&byte| format!("%{:X}", byte))
+                    .collect::<String>(),
+            );
+
+            start = index + 1;
+        }
+
+        index += 1;
+    }
+
+    result.push(chars[start..].iter().collect::<String>());
+
+    result.join("")
+}
+
/// Decode numeric character references.
///
/// Turn the number (in string form as either hexadecimal or decimal) coming
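
The protocol check in `sanitize_uri` scans up to the first `:`, `?`, `#`, or `/`: a colon first means there is a protocol to verify against the allow-list, anything else (or no colon at all) means the URL is relative and passes through. A self-contained sketch of that check, with a hypothetical function name (the crate keeps the logic inline in `sanitize_uri`):

// Hypothetical standalone version of the allow-list check in `sanitize_uri`.
fn has_allowed_protocol(value: &str, protocols: &[&str]) -> bool {
    for (index, char) in value.chars().enumerate() {
        match char {
            // The first colon ends the protocol; compare it case-insensitively.
            ':' => {
                let protocol = value.chars().take(index).collect::<String>().to_lowercase();
                return protocols.contains(&protocol.as_str());
            }
            // A `?`, `#`, or `/` before any colon: the URL is relative, keep it.
            '?' | '#' | '/' => return true,
            _ => {}
        }
    }
    // No colon at all: also relative.
    true
}

fn main() {
    let allowed = ["http", "https", "irc", "ircs", "mailto", "xmpp"];
    assert!(has_allowed_protocol("https://example.com", &allowed));
    assert!(has_allowed_protocol("/relative?query#fragment", &allowed));
    assert!(!has_allowed_protocol("javascript:alert(1)", &allowed));
}

Together with the percent-encoding in `normalize_uri`, this is what turns `<http://example.com/\[\>` into `href="http://example.com/%5C%5B%5C"` in the autolink tests that follow.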
diff --git a/tests/autolink.rs b/tests/autolink.rs
index fc49dcb..9d394d7 100644
--- a/tests/autolink.rs
+++ b/tests/autolink.rs
@@ -1,5 +1,10 @@
extern crate micromark;
-use micromark::micromark;
+use micromark::{micromark, micromark_with_options, CompileOptions};
+
+const DANGER: &CompileOptions = &CompileOptions {
+    allow_dangerous_html: true,
+    allow_dangerous_protocol: true,
+};

 #[test]
 fn autolink() {
@@ -33,19 +38,29 @@ fn autolink() {
"should support protocol autolinks in uppercase"
);
- // To do: safety.
- // assert_eq!(
- // micromark("<a+b+c:d>", {allowDangerousProtocol: true}),
- // "<p><a href=\"a+b+c:d\">a+b+c:d</a></p>",
- // "should support protocol autolinks w/ incorrect URIs (1)"
- // );
+ assert_eq!(
+ micromark("<a+b+c:d>"),
+ "<p><a href=\"\">a+b+c:d</a></p>",
+ "should support protocol autolinks w/ incorrect URIs (1, default)"
+ );
- // To do: safety.
- // assert_eq!(
- // micromark("<made-up-scheme://foo,bar>", {allowDangerousProtocol: true}),
- // "<p><a href=\"made-up-scheme://foo,bar\">made-up-scheme://foo,bar</a></p>",
- // "should support protocol autolinks w/ incorrect URIs (2)"
- // );
+ assert_eq!(
+ micromark_with_options("<a+b+c:d>", DANGER),
+ "<p><a href=\"a+b+c:d\">a+b+c:d</a></p>",
+ "should support protocol autolinks w/ incorrect URIs (1, danger)"
+ );
+
+ assert_eq!(
+ micromark("<made-up-scheme://foo,bar>"),
+ "<p><a href=\"\">made-up-scheme://foo,bar</a></p>",
+ "should support protocol autolinks w/ incorrect URIs (2, default)"
+ );
+
+ assert_eq!(
+ micromark_with_options("<made-up-scheme://foo,bar>", DANGER),
+ "<p><a href=\"made-up-scheme://foo,bar\">made-up-scheme://foo,bar</a></p>",
+ "should support protocol autolinks w/ incorrect URIs (2, danger)"
+ );
assert_eq!(
micromark("<http://../>"),
@@ -53,12 +68,11 @@ fn autolink() {
"should support protocol autolinks w/ incorrect URIs (3)"
);
- // To do: safety.
- // assert_eq!(
- // micromark("<localhost:5001/foo>", {allowDangerousProtocol: true}),
- // "<p><a href=\"localhost:5001/foo\">localhost:5001/foo</a></p>",
- // "should support protocol autolinks w/ incorrect URIs (4)"
- // );
+ assert_eq!(
+ micromark_with_options("<localhost:5001/foo>", DANGER),
+ "<p><a href=\"localhost:5001/foo\">localhost:5001/foo</a></p>",
+ "should support protocol autolinks w/ incorrect URIs (4)"
+ );
assert_eq!(
micromark("<http://foo.bar/baz bim>"),
@@ -66,12 +80,11 @@ fn autolink() {
"should not support protocol autolinks w/ spaces"
);
- // To do: encode urls.
- // assert_eq!(
- // micromark("<http://example.com/\\[\\>"),
- // "<p><a href=\"http://example.com/%5C%5B%5C\">http://example.com/\\[\\</a></p>",
- // "should not support character escapes in protocol autolinks"
- // );
+ assert_eq!(
+ micromark("<http://example.com/\\[\\>"),
+ "<p><a href=\"http://example.com/%5C%5B%5C\">http://example.com/\\[\\</a></p>",
+ "should not support character escapes in protocol autolinks"
+ );
assert_eq!(
micromark("<foo@bar.example.com>"),
diff --git a/tests/html_flow.rs b/tests/html_flow.rs
index 7969487..6445af3 100644
--- a/tests/html_flow.rs
+++ b/tests/html_flow.rs
@@ -3,6 +3,7 @@ use micromark::{micromark, micromark_with_options, CompileOptions};
 const DANGER: &CompileOptions = &CompileOptions {
     allow_dangerous_html: true,
+    allow_dangerous_protocol: false,
 };

 #[test]