path: root/src/content/string.rs
author	Titus Wormer <tituswormer@gmail.com>	2022-08-12 14:21:53 +0200
committer	Titus Wormer <tituswormer@gmail.com>	2022-08-12 14:21:53 +0200
commit	504729a4a0c8f3e0d8fc9159e0273150b169e184 (patch)
tree	a6bf291322decccd6011580337b1feed6151b554 /src/content/string.rs
parent	db5a491e6c2223d1db9b458307431a54db3c40f2 (diff)
Refactor to improve docs of each function
Diffstat (limited to 'src/content/string.rs')
-rw-r--r--	src/content/string.rs	18
1 file changed, 17 insertions, 1 deletion
diff --git a/src/content/string.rs b/src/content/string.rs
index 1eefd30..ec4fce2 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -17,9 +17,15 @@ use crate::resolve::Name as ResolveName;
use crate::state::{Name as StateName, State};
use crate::tokenizer::Tokenizer;
+/// Characters that can start something in string.
const MARKERS: [u8; 2] = [b'&', b'\\'];
/// Start of string.
+///
+/// ````markdown
+/// > | ```js
+/// ^
+/// ````
pub fn start(tokenizer: &mut Tokenizer) -> State {
tokenizer.register_resolver(ResolveName::String);
tokenizer.tokenize_state.markers = &MARKERS;
@@ -27,6 +33,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State {
}

/// Before string.
+///
+/// ````markdown
+/// > | ```js
+/// ^
+/// ````
pub fn before(tokenizer: &mut Tokenizer) -> State {
match tokenizer.current {
None => State::Ok,
@@ -49,12 +60,17 @@ pub fn before(tokenizer: &mut Tokenizer) -> State {
}

/// At data.
+///
+/// ````markdown
+/// > | ```js
+/// ^
+/// ````
pub fn before_data(tokenizer: &mut Tokenizer) -> State {
tokenizer.attempt(State::Next(StateName::StringBefore), State::Nok);
State::Retry(StateName::DataStart)
}

-/// Resolve whitespace.
+/// Resolve whitespace in string.
pub fn resolve(tokenizer: &mut Tokenizer) {
resolve_whitespace(tokenizer, false, false);
}
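
For readers following the state names above: `start` registers the string resolver and the `&`/`\` markers, then retries at `StringBefore`; `before` inspects `tokenizer.current` and either finishes, attempts a construct, or retries at data; `before_data` attempts plain data and loops back to `StringBefore`. The sketch below reconstructs the dispatch in `before` from those visible pieces. It is not part of this diff: the `StringBeforeData`, `CharacterReferenceStart`, and `CharacterEscapeStart` state names and the exact match arms are assumptions inferred from the `MARKERS` constant, so treat it as an illustration rather than the crate's actual code.

```rust
// Hedged sketch only: reconstructs how `before` could dispatch on the two
// MARKERS bytes. The state names `StringBeforeData`, `CharacterReferenceStart`,
// and `CharacterEscapeStart` are assumed here, not shown in the hunks above.
use crate::state::{Name as StateName, State};
use crate::tokenizer::Tokenizer;

pub fn before(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        // End of input: the string content is done.
        None => State::Ok,
        // `&` may start a character reference; if that attempt fails,
        // fall back to plain data.
        Some(b'&') => {
            tokenizer.attempt(
                State::Next(StateName::StringBefore),
                State::Next(StateName::StringBeforeData),
            );
            State::Retry(StateName::CharacterReferenceStart)
        }
        // `\` may start a character escape; same fallback on failure.
        Some(b'\\') => {
            tokenizer.attempt(
                State::Next(StateName::StringBefore),
                State::Next(StateName::StringBeforeData),
            );
            State::Retry(StateName::CharacterEscapeStart)
        }
        // Anything else is plain data.
        _ => State::Retry(StateName::StringBeforeData),
    }
}
```

The attempt/retry split keeps each parsing step a small, separately documented function, which is what the per-function doc examples added by this commit annotate.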