author    Titus Wormer <tituswormer@gmail.com>  2022-06-20 12:34:38 +0200
committer Titus Wormer <tituswormer@gmail.com>  2022-06-20 12:34:38 +0200
commit    5bf187fab2df0122e51523d1c731e457ab366121 (patch)
tree      02a661ef24cece9656af279e3d2405ada60333e5
parent    912f91b2b35be6b0d05dd768d67599d76151bc5c (diff)
Remove unneeded `pub` keywords
Diffstat
-rw-r--r--  readme.md                          2
-rw-r--r--  src/construct/autolink.rs         18
-rw-r--r--  src/construct/code_text.rs         8
-rw-r--r--  src/construct/definition.rs       37
-rw-r--r--  src/construct/heading_setext.rs    2
-rw-r--r--  src/construct/html_text.rs        61
-rw-r--r--  src/construct/partial_title.rs     5
7 files changed, 59 insertions, 74 deletions
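
The hunks below all apply the same kind of change: within each construct module, only `start` stays `pub`, while the other state functions become private and are only reached through boxed state functions handed back by the previous state. As a rough illustration of that pattern, here is a minimal, self-contained sketch; the `Tokenizer`, `Code`, `State`, and `StateFnResult` types in it are simplified stand-ins for this example, not the crate's real definitions.

```rust
#[derive(Clone, Copy, Debug)]
enum Code {
    Char(char),
}

type StateFnResult = (State, Option<Vec<Code>>);

enum State {
    Ok,
    Nok,
    Fn(Box<dyn FnOnce(&mut Tokenizer, Code) -> StateFnResult>),
}

struct Tokenizer {
    consumed: Vec<Code>,
}

impl Tokenizer {
    fn consume(&mut self, code: Code) {
        self.consumed.push(code);
    }
}

/// The construct's entry point: the only function the module needs to expose.
pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::Char('<') => {
            tokenizer.consume(code);
            // Hand control to the next (private) state.
            (State::Fn(Box::new(open)), None)
        }
        _ => (State::Nok, None),
    }
}

/// Internal state: never named outside the module, so `pub` is unneeded.
fn open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
    match code {
        Code::Char(char) if char.is_ascii_alphabetic() => {
            tokenizer.consume(code);
            (State::Ok, None)
        }
        _ => (State::Nok, None),
    }
}

fn main() {
    let mut tokenizer = Tokenizer { consumed: vec![] };
    // Drive the two states by hand: `<`, then `a`.
    let (state, _) = start(&mut tokenizer, Code::Char('<'));
    let (state, _) = match state {
        State::Fn(next) => next(&mut tokenizer, Code::Char('a')),
        other => (other, None),
    };
    match state {
        State::Ok => println!("matched, consumed {:?}", tokenizer.consumed),
        _ => println!("no match"),
    }
}
```

Because each state hands off to the next via a boxed state function built inside the module, no code outside the module ever names these functions, which is why dropping `pub` from everything but `start` is safe.
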
diff --git a/readme.md b/readme.md
index a00a658..0cd5bd2 100644
--- a/readme.md
+++ b/readme.md
@@ -66,7 +66,6 @@ cargo doc --document-private-items
### Small things
-- [ ] (1) Remove all `pub fn`s from constructs, except for start
- [ ] (1) Remove `content` content type, as it is no longer needed
- [ ] (1) Connect `ChunkString` in label, destination, title
- [ ] (1) Add support for line endings in `string`
@@ -174,6 +173,7 @@ cargo doc --document-private-items
- [x] (1) Setext headings: can they be solved in content, or do they have to be
solved in flow somehow
- [x] (1) Add docs to partials
+- [x] (1) Remove all `pub fn`s from constructs, except for start
### Extensions
diff --git a/src/construct/autolink.rs b/src/construct/autolink.rs
index 78003fb..c9596a6 100644
--- a/src/construct/autolink.rs
+++ b/src/construct/autolink.rs
@@ -124,7 +124,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a<|https://example.com>b
/// a<|user@example.com>b
/// ```
-pub fn open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char(char) if char.is_ascii_alphabetic() => {
tokenizer.consume(code);
@@ -141,7 +141,7 @@ pub fn open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a<h|ttps://example.com>b
/// a<u|ser@example.com>b
/// ```
-pub fn scheme_or_email_atext(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn scheme_or_email_atext(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
// Whether this character can be both a protocol and email atext.
let unknown = match code {
Code::Char('+' | '-' | '.') => true,
@@ -162,7 +162,7 @@ pub fn scheme_or_email_atext(tokenizer: &mut Tokenizer, code: Code) -> StateFnRe
/// a<ht|tps://example.com>b
/// a<us|er@example.com>b
/// ```
-pub fn scheme_inside_or_email_atext(
+fn scheme_inside_or_email_atext(
tokenizer: &mut Tokenizer,
code: Code,
size: usize,
@@ -199,7 +199,7 @@ pub fn scheme_inside_or_email_atext(
/// ```markdown
/// a<https:|//example.com>b
/// ```
-pub fn url_inside(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn url_inside(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('>') => {
tokenizer.exit(TokenType::AutolinkProtocol);
@@ -221,7 +221,7 @@ pub fn url_inside(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// a<user.na|me@example.com>b
/// ```
-pub fn email_atext(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn email_atext(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('@') => {
tokenizer.consume(code);
@@ -244,7 +244,7 @@ pub fn email_atext(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a<user.name@|example.com>b
/// a<user.name@example.|com>b
/// ```
-pub fn email_at_sign_or_dot(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateFnResult {
+fn email_at_sign_or_dot(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateFnResult {
match code {
Code::Char(char) if char.is_ascii_alphanumeric() => email_value(tokenizer, code, size),
_ => (State::Nok, None),
@@ -256,7 +256,7 @@ pub fn email_at_sign_or_dot(tokenizer: &mut Tokenizer, code: Code, size: usize)
/// ```markdown
/// a<user.name@ex|ample.com>b
/// ```
-pub fn email_label(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateFnResult {
+fn email_label(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateFnResult {
match code {
Code::Char('.') => {
tokenizer.consume(code);
@@ -285,7 +285,7 @@ pub fn email_label(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateF
/// ```markdown
/// a<user.name@ex-|ample.com>b
/// ```
-pub fn email_value(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateFnResult {
+fn email_value(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateFnResult {
let ok = match code {
Code::Char('-') if size < AUTOLINK_DOMAIN_SIZE_MAX => true,
Code::Char(char) if char.is_ascii_alphanumeric() && size < AUTOLINK_DOMAIN_SIZE_MAX => true,
@@ -311,7 +311,7 @@ pub fn email_value(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateF
/// a<https://example.com|>b
/// a<user@example.com|>b
/// ```
-pub fn end(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn end(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('>') => {
tokenizer.enter(TokenType::AutolinkMarker);
diff --git a/src/construct/code_text.rs b/src/construct/code_text.rs
index 3c01070..1f34e41 100644
--- a/src/construct/code_text.rs
+++ b/src/construct/code_text.rs
@@ -109,7 +109,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// `|`a``
/// ```
-pub fn sequence_open(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateFnResult {
+fn sequence_open(tokenizer: &mut Tokenizer, code: Code, size: usize) -> StateFnResult {
if let Code::Char('`') = code {
tokenizer.consume(code);
(
@@ -130,7 +130,7 @@ pub fn sequence_open(tokenizer: &mut Tokenizer, code: Code, size: usize) -> Stat
/// `|a`
/// `a|`
/// ```
-pub fn between(tokenizer: &mut Tokenizer, code: Code, size_open: usize) -> StateFnResult {
+fn between(tokenizer: &mut Tokenizer, code: Code, size_open: usize) -> StateFnResult {
match code {
Code::None => (State::Nok, None),
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
@@ -160,7 +160,7 @@ pub fn between(tokenizer: &mut Tokenizer, code: Code, size_open: usize) -> State
/// ```markdown
/// `a|b`
/// ```
-pub fn data(tokenizer: &mut Tokenizer, code: Code, size_open: usize) -> StateFnResult {
+fn data(tokenizer: &mut Tokenizer, code: Code, size_open: usize) -> StateFnResult {
match code {
Code::None | Code::CarriageReturnLineFeed | Code::Char('\r' | '\n' | '`') => {
tokenizer.exit(TokenType::CodeTextData);
@@ -183,7 +183,7 @@ pub fn data(tokenizer: &mut Tokenizer, code: Code, size_open: usize) -> StateFnR
/// ```markdown
/// ``a`|`
/// ```
-pub fn sequence_close(
+fn sequence_close(
tokenizer: &mut Tokenizer,
code: Code,
size_open: usize,
diff --git a/src/construct/definition.rs b/src/construct/definition.rs
index 3035a20..65c0991 100644
--- a/src/construct/definition.rs
+++ b/src/construct/definition.rs
@@ -79,7 +79,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// [a]|: b "c"
/// ```
-pub fn label_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn label_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
// To do: get the identifier:
// identifier = normalizeIdentifier(
// self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
@@ -104,7 +104,7 @@ pub fn label_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// [a]:| ␊
/// b "c"
/// ```
-pub fn marker_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn marker_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
tokenizer.attempt(
|t, c| whitespace(t, c, TokenType::Whitespace),
|_ok| Box::new(marker_after_optional_whitespace),
@@ -119,7 +119,7 @@ pub fn marker_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// [a]: |␊
/// b "c"
/// ```
-pub fn marker_after_optional_whitespace(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn marker_after_optional_whitespace(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
tokenizer.enter(TokenType::LineEnding);
@@ -137,7 +137,7 @@ pub fn marker_after_optional_whitespace(tokenizer: &mut Tokenizer, code: Code) -
/// [a]:
/// | b "c"
/// ```
-pub fn marker_after_optional_line_ending(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn marker_after_optional_line_ending(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
tokenizer.attempt(
|t, c| whitespace(t, c, TokenType::Whitespace),
|_ok| Box::new(destination_before),
@@ -152,7 +152,7 @@ pub fn marker_after_optional_line_ending(tokenizer: &mut Tokenizer, code: Code)
/// [a]:
/// |b "c"
/// ```
-pub fn destination_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn destination_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
let event = tokenizer.events.last().unwrap();
// Blank line not ok.
let char_nok = matches!(
@@ -177,7 +177,7 @@ pub fn destination_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResul
/// [a]: b| ␊
/// "c"
/// ```
-pub fn destination_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn destination_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
tokenizer.attempt(title_before, |_ok| Box::new(after))(tokenizer, code)
}
@@ -187,7 +187,7 @@ pub fn destination_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult
/// [a]: b|
/// [a]: b "c"|
/// ```
-pub fn after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
tokenizer.attempt(
|t, c| whitespace(t, c, TokenType::Whitespace),
|_ok| Box::new(after_whitespace),
@@ -200,7 +200,7 @@ pub fn after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// [a]: b |
/// [a]: b "c"|
/// ```
-pub fn after_whitespace(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn after_whitespace(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None | Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
tokenizer.exit(TokenType::Definition);
@@ -218,7 +218,7 @@ pub fn after_whitespace(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult
/// [a]: b| ␊
/// "c"
/// ```
-pub fn title_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn title_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
tokenizer.attempt(
|t, c| whitespace(t, c, TokenType::Whitespace),
|_ok| Box::new(title_before_after_optional_whitespace),
@@ -233,10 +233,7 @@ pub fn title_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// [a]: b |␊
/// "c"
/// ```
-pub fn title_before_after_optional_whitespace(
- tokenizer: &mut Tokenizer,
- code: Code,
-) -> StateFnResult {
+fn title_before_after_optional_whitespace(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
tokenizer.enter(TokenType::LineEnding);
@@ -257,10 +254,7 @@ pub fn title_before_after_optional_whitespace(
/// [a]: b␊
/// | "c"
/// ```
-pub fn title_before_after_optional_line_ending(
- tokenizer: &mut Tokenizer,
- code: Code,
-) -> StateFnResult {
+fn title_before_after_optional_line_ending(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
tokenizer.attempt(
|t, c| whitespace(t, c, TokenType::Whitespace),
|_ok| Box::new(title_before_marker),
@@ -273,7 +267,7 @@ pub fn title_before_after_optional_line_ending(
/// [a]: b␊
/// | "c"
/// ```
-pub fn title_before_marker(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn title_before_marker(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
let event = tokenizer.events.last().unwrap();
if event.token_type == TokenType::LineEnding || event.token_type == TokenType::Whitespace {
@@ -291,7 +285,7 @@ pub fn title_before_marker(tokenizer: &mut Tokenizer, code: Code) -> StateFnResu
/// [a]: b␊
/// "c"|
/// ```
-pub fn title_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn title_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
tokenizer.attempt(
|t, c| whitespace(t, c, TokenType::Whitespace),
|_ok| Box::new(title_after_after_optional_whitespace),
@@ -305,10 +299,7 @@ pub fn title_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
///
/// [a]: b "c" |
/// ```
-pub fn title_after_after_optional_whitespace(
- _tokenizer: &mut Tokenizer,
- code: Code,
-) -> StateFnResult {
+fn title_after_after_optional_whitespace(_tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None | Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
(State::Ok, Some(vec![code]))
diff --git a/src/construct/heading_setext.rs b/src/construct/heading_setext.rs
index 8cc4f6d..f4c6001 100644
--- a/src/construct/heading_setext.rs
+++ b/src/construct/heading_setext.rs
@@ -90,7 +90,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// bra|vo
/// ==
/// ```
-pub fn text_inside(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn text_inside(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None => (State::Nok, None),
Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
diff --git a/src/construct/html_text.rs b/src/construct/html_text.rs
index c118006..d50a8ce 100644
--- a/src/construct/html_text.rs
+++ b/src/construct/html_text.rs
@@ -75,7 +75,7 @@ pub fn start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a <|!doctype> b
/// a <|!--xxx--/> b
/// ```
-pub fn open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('!') => {
tokenizer.consume(code);
@@ -104,7 +104,7 @@ pub fn open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a <!|--xxx--> b
/// a <!|[CDATA[>&<]]> b
/// ```
-pub fn declaration_open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn declaration_open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('-') => {
tokenizer.consume(code);
@@ -133,7 +133,7 @@ pub fn declaration_open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult
/// ```markdown
/// a <!-|-xxx--> b
/// ```
-pub fn comment_open_inside(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn comment_open_inside(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('-') => {
tokenizer.consume(code);
@@ -155,7 +155,7 @@ pub fn comment_open_inside(tokenizer: &mut Tokenizer, code: Code) -> StateFnResu
/// ```
///
/// [html_flow]: crate::construct::html_flow
-pub fn comment_start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn comment_start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None | Code::Char('>') => (State::Nok, None),
Code::Char('-') => {
@@ -178,7 +178,7 @@ pub fn comment_start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```
///
/// [html_flow]: crate::construct::html_flow
-pub fn comment_start_dash(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn comment_start_dash(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None | Code::Char('>') => (State::Nok, None),
_ => comment(tokenizer, code),
@@ -191,7 +191,7 @@ pub fn comment_start_dash(tokenizer: &mut Tokenizer, code: Code) -> StateFnResul
/// a <!--|xxx--> b
/// a <!--x|xx--> b
/// ```
-pub fn comment(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn comment(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None => (State::Nok, None),
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
@@ -214,7 +214,7 @@ pub fn comment(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a <!--xxx-|-> b
/// a <!--xxx-|yyy--> b
/// ```
-pub fn comment_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn comment_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('-') => {
tokenizer.consume(code);
@@ -233,7 +233,7 @@ pub fn comment_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a <![CDAT|A[>&<]]> b
/// a <![CDATA|[>&<]]> b
/// ```
-pub fn cdata_open_inside(
+fn cdata_open_inside(
tokenizer: &mut Tokenizer,
code: Code,
buffer: Vec<char>,
@@ -263,7 +263,7 @@ pub fn cdata_open_inside(
/// ```markdown
/// a <![CDATA[|>&<]]> b
/// ```
-pub fn cdata(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn cdata(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None => (State::Nok, None),
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
@@ -285,7 +285,7 @@ pub fn cdata(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// a <![CDATA[>&<]|]> b
/// ```
-pub fn cdata_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn cdata_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char(']') => {
tokenizer.consume(code);
@@ -300,7 +300,7 @@ pub fn cdata_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// a <![CDATA[>&<]]|> b
/// ```
-pub fn cdata_end(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn cdata_end(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('>') => end(tokenizer, code),
Code::Char(']') => cdata_close(tokenizer, code),
@@ -313,7 +313,7 @@ pub fn cdata_end(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// ```markdown
/// a <!a|b> b
/// ```
-pub fn declaration(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn declaration(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None | Code::Char('>') => end(tokenizer, code),
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
@@ -332,7 +332,7 @@ pub fn declaration(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a <?|ab?> b
/// a <?a|b?> b
/// ```
-pub fn instruction(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn instruction(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None => (State::Nok, None),
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
@@ -355,7 +355,7 @@ pub fn instruction(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a <?aa?|> b
/// a <?aa?|bb?> b
/// ```
-pub fn instruction_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn instruction_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('>') => end(tokenizer, code),
_ => instruction(tokenizer, code),
@@ -367,7 +367,7 @@ pub fn instruction_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult
/// ```markdown
/// a </|x> b
/// ```
-pub fn tag_close_start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_close_start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char(char) if char.is_ascii_alphabetic() => {
tokenizer.consume(code);
@@ -383,7 +383,7 @@ pub fn tag_close_start(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a </x|> b
/// a </x|y> b
/// ```
-pub fn tag_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char(char) if char == '-' || char.is_ascii_alphanumeric() => {
tokenizer.consume(code);
@@ -399,7 +399,7 @@ pub fn tag_close(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a </x| > b
/// a </xy |> b
/// ```
-pub fn tag_close_between(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_close_between(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
at_line_ending(tokenizer, code, Box::new(tag_close_between))
@@ -417,7 +417,7 @@ pub fn tag_close_between(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult
/// ```markdown
/// a <x|> b
/// ```
-pub fn tag_open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char(char) if char == '-' || char.is_ascii_alphanumeric() => {
tokenizer.consume(code);
@@ -437,7 +437,7 @@ pub fn tag_open(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a <x |y="z"> b
/// a <x |/> b
/// ```
-pub fn tag_open_between(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_open_between(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
at_line_ending(tokenizer, code, Box::new(tag_open_between))
@@ -465,7 +465,7 @@ pub fn tag_open_between(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult
/// a <x _|> b
/// a <x a|> b
/// ```
-pub fn tag_open_attribute_name(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_open_attribute_name(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char(char)
if char == '-'
@@ -489,7 +489,7 @@ pub fn tag_open_attribute_name(tokenizer: &mut Tokenizer, code: Code) -> StateFn
/// a <x a|=b> b
/// a <x a|="c"> b
/// ```
-pub fn tag_open_attribute_name_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_open_attribute_name_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
at_line_ending(tokenizer, code, Box::new(tag_open_attribute_name_after))
@@ -513,7 +513,7 @@ pub fn tag_open_attribute_name_after(tokenizer: &mut Tokenizer, code: Code) -> S
/// a <x a=|b> b
/// a <x a=|"c"> b
/// ```
-pub fn tag_open_attribute_value_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_open_attribute_value_before(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None | Code::Char('<' | '=' | '>' | '`') => (State::Nok, None),
Code::CarriageReturnLineFeed | Code::Char('\r' | '\n') => {
@@ -545,7 +545,7 @@ pub fn tag_open_attribute_value_before(tokenizer: &mut Tokenizer, code: Code) ->
/// a <x a="|"> b
/// a <x a='|'> b
/// ```
-pub fn tag_open_attribute_value_quoted(
+fn tag_open_attribute_value_quoted(
tokenizer: &mut Tokenizer,
code: Code,
marker: char,
@@ -583,7 +583,7 @@ pub fn tag_open_attribute_value_quoted(
/// ```markdown
/// a <x a=b|c> b
/// ```
-pub fn tag_open_attribute_value_unquoted(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn tag_open_attribute_value_unquoted(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None | Code::Char('"' | '\'' | '<' | '=' | '`') => (State::Nok, None),
Code::CarriageReturnLineFeed
@@ -602,10 +602,7 @@ pub fn tag_open_attribute_value_unquoted(tokenizer: &mut Tokenizer, code: Code)
/// ```markdown
/// a <x a="b"|> b
/// ```
-pub fn tag_open_attribute_value_quoted_after(
- tokenizer: &mut Tokenizer,
- code: Code,
-) -> StateFnResult {
+fn tag_open_attribute_value_quoted_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::CarriageReturnLineFeed
| Code::VirtualSpace
@@ -621,7 +618,7 @@ pub fn tag_open_attribute_value_quoted_after(
/// a <!--xx--|> b
/// a <x /|> b
/// ```
-pub fn end(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
+fn end(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::Char('>') => {
tokenizer.consume(code);
@@ -642,7 +639,7 @@ pub fn end(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
/// a <!--a|
/// b--> b
/// ```
-pub fn at_line_ending(
+fn at_line_ending(
tokenizer: &mut Tokenizer,
code: Code,
return_state: Box<StateFn>,
@@ -671,7 +668,7 @@ pub fn at_line_ending(
/// a <!--a
/// |b--> b
/// ```
-pub fn after_line_ending(
+fn after_line_ending(
tokenizer: &mut Tokenizer,
code: Code,
return_state: Box<StateFn>,
@@ -691,7 +688,7 @@ pub fn after_line_ending(
/// a <!--a
/// |b--> b
/// ```
-pub fn after_line_ending_prefix(
+fn after_line_ending_prefix(
tokenizer: &mut Tokenizer,
code: Code,
return_state: Box<StateFn>,
diff --git a/src/construct/partial_title.rs b/src/construct/partial_title.rs
index a626c50..7b5fa64 100644
--- a/src/construct/partial_title.rs
+++ b/src/construct/partial_title.rs
@@ -123,10 +123,7 @@ fn at_break(tokenizer: &mut Tokenizer, code: Code, kind: Kind) -> StateFnResult
tokenizer.enter(TokenType::LineEnding);
tokenizer.consume(code);
tokenizer.exit(TokenType::LineEnding);
- (
- State::Fn(Box::new(|t, c| line_start(t, c, kind))),
- None,
- )
+ (State::Fn(Box::new(|t, c| line_start(t, c, kind))), None)
}
_ => {
// To do: link.