| author | 2022-09-28 17:54:39 +0200 |
|---|---|
| committer | 2022-09-28 17:55:44 +0200 |
| commit | b33a81e40620b8b3eaeeec9d0e0b34ca5958dead (patch) |
| tree | c91e56db38777b30cdcef591d0f7cd9bd1ac0ee8 /src/construct |
| parent | a0c84c505d733be2e987a333a34244c1befb56cb (diff) |

Add support for turning mdast to hast
Diffstat (limited to '')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/construct/attention.rs | 5 |
| -rw-r--r-- | src/construct/gfm_table.rs | 6 |
| -rw-r--r-- | src/construct/heading_atx.rs | 6 |
| -rw-r--r-- | src/construct/heading_setext.rs | 6 |
| -rw-r--r-- | src/construct/label_end.rs | 4 |
| -rw-r--r-- | src/construct/list_item.rs | 6 |
| -rw-r--r-- | src/construct/partial_data.rs | 6 |
| -rw-r--r-- | src/construct/partial_mdx_expression.rs | 2 |
| -rw-r--r-- | src/construct/string.rs | 6 |
| -rw-r--r-- | src/construct/text.rs | 5 |

10 files changed, 24 insertions, 28 deletions
```diff
diff --git a/src/construct/attention.rs b/src/construct/attention.rs
index 4d58610..d99a52c 100644
--- a/src/construct/attention.rs
+++ b/src/construct/attention.rs
@@ -88,7 +88,6 @@ use crate::util::{
     },
     slice::Slice,
 };
-use alloc::string::String;
 use alloc::{vec, vec::Vec};
 
 /// Attentention sequence that we can take markers from.
@@ -152,7 +151,7 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Resolve sequences.
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     // Find all sequences, gather info about them.
     let mut sequences = get_sequences(tokenizer);
 
@@ -224,7 +223,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
 
     tokenizer.map.consume(&mut tokenizer.events);
 
-    Ok(None)
+    None
 }
 
 /// Get sequences.
diff --git a/src/construct/gfm_table.rs b/src/construct/gfm_table.rs
index 63772c4..547358f 100644
--- a/src/construct/gfm_table.rs
+++ b/src/construct/gfm_table.rs
@@ -232,7 +232,7 @@ use crate::state::{Name as StateName, State};
 use crate::subtokenize::Subresult;
 use crate::tokenizer::Tokenizer;
 use crate::util::{constant::TAB_SIZE, skip::opt_back as skip_opt_back};
-use alloc::{string::String, vec};
+use alloc::vec;
 
 /// Start of a GFM table.
 ///
@@ -772,7 +772,7 @@ pub fn body_row_escape(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Resolve GFM table.
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     let mut index = 0;
     let mut in_first_cell_awaiting_pipe = true;
     let mut in_row = false;
@@ -887,7 +887,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
         flush_table_end(tokenizer, last_table_end, last_table_has_body);
     }
 
-    Ok(None)
+    None
 }
 
 /// Generate a cell.
diff --git a/src/construct/heading_atx.rs b/src/construct/heading_atx.rs
index b76e455..c867117 100644
--- a/src/construct/heading_atx.rs
+++ b/src/construct/heading_atx.rs
@@ -69,7 +69,7 @@ use crate::state::{Name as StateName, State};
 use crate::subtokenize::Subresult;
 use crate::tokenizer::Tokenizer;
 use crate::util::constant::{HEADING_ATX_OPENING_FENCE_SIZE_MAX, TAB_SIZE};
-use alloc::{string::String, vec};
+use alloc::vec;
 
 /// Start of a heading (atx).
 ///
@@ -223,7 +223,7 @@ pub fn data(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Resolve heading (atx).
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     let mut index = 0;
     let mut heading_inside = false;
     let mut data_start: Option<usize> = None;
@@ -283,5 +283,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
         index += 1;
     }
 
-    Ok(None)
+    None
 }
diff --git a/src/construct/heading_setext.rs b/src/construct/heading_setext.rs
index 3a484e1..1e6fd00 100644
--- a/src/construct/heading_setext.rs
+++ b/src/construct/heading_setext.rs
@@ -77,7 +77,7 @@ use crate::state::{Name as StateName, State};
 use crate::subtokenize::Subresult;
 use crate::tokenizer::Tokenizer;
 use crate::util::{constant::TAB_SIZE, skip};
-use alloc::{string::String, vec};
+use alloc::vec;
 
 /// At start of heading (setext) underline.
 ///
@@ -184,7 +184,7 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Resolve heading (setext).
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     tokenizer.map.consume(&mut tokenizer.events);
 
     let mut enter = skip::to(&tokenizer.events, 0, &[Name::HeadingSetextUnderline]);
@@ -281,5 +281,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
 
     tokenizer.map.consume(&mut tokenizer.events);
 
-    Ok(None)
+    None
 }
diff --git a/src/construct/label_end.rs b/src/construct/label_end.rs
index 95b9a27..ca71245 100644
--- a/src/construct/label_end.rs
+++ b/src/construct/label_end.rs
@@ -661,7 +661,7 @@ pub fn reference_collapsed_open(tokenizer: &mut Tokenizer) -> State {
 ///
 /// This turns matching label starts and label ends into links, images, and
 /// footnotes, and turns unmatched label starts back into data.
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     // Inject labels.
     let labels = tokenizer.tokenize_state.labels.split_off(0);
     inject_labels(tokenizer, &labels);
@@ -673,7 +673,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
 
     tokenizer.map.consume(&mut tokenizer.events);
 
-    Ok(None)
+    None
 }
 
 /// Inject links/images/footnotes.
diff --git a/src/construct/list_item.rs b/src/construct/list_item.rs
index 13b740b..a4f166d 100644
--- a/src/construct/list_item.rs
+++ b/src/construct/list_item.rs
@@ -69,7 +69,7 @@ use crate::util::{
     skip,
     slice::{Position, Slice},
 };
-use alloc::{string::String, vec, vec::Vec};
+use alloc::{vec, vec::Vec};
 
 /// Start of list item.
 ///
@@ -371,7 +371,7 @@ pub fn cont_filled(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Find adjacent list items with the same marker.
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     let mut lists_wip: Vec<(u8, usize, usize, usize)> = vec![];
     let mut lists: Vec<(u8, usize, usize, usize)> = vec![];
     let mut index = 0;
@@ -474,5 +474,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
         index += 1;
     }
 
-    Ok(None)
+    None
 }
diff --git a/src/construct/partial_data.rs b/src/construct/partial_data.rs
index b36d9f0..a27730c 100644
--- a/src/construct/partial_data.rs
+++ b/src/construct/partial_data.rs
@@ -10,7 +10,7 @@ use crate::event::{Kind, Name};
 use crate::state::{Name as StateName, State};
 use crate::subtokenize::Subresult;
 use crate::tokenizer::Tokenizer;
-use alloc::{string::String, vec};
+use alloc::vec;
 
 /// At beginning of data.
 ///
@@ -73,7 +73,7 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Merge adjacent data events.
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     let mut index = 0;
 
     // Loop through events and merge adjacent data events.
@@ -105,5 +105,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
         index += 1;
     }
 
-    Ok(None)
+    None
 }
diff --git a/src/construct/partial_mdx_expression.rs b/src/construct/partial_mdx_expression.rs
index 3ebd0f0..789443e 100644
--- a/src/construct/partial_mdx_expression.rs
+++ b/src/construct/partial_mdx_expression.rs
@@ -219,7 +219,7 @@ fn parse_expression(tokenizer: &mut Tokenizer, parse: &MdxExpressionParse) -> St
     };
 
     // Parse and handle what was signaled back.
-    match parse(&result.value, kind) {
+    match parse(&result.value, &kind) {
         MdxSignal::Ok => State::Ok,
         MdxSignal::Error(message, place) => {
             let point = place_to_point(&result, place);
diff --git a/src/construct/string.rs b/src/construct/string.rs
index cf2f222..cad570d 100644
--- a/src/construct/string.rs
+++ b/src/construct/string.rs
@@ -17,7 +17,6 @@ use crate::resolve::Name as ResolveName;
 use crate::state::{Name as StateName, State};
 use crate::subtokenize::Subresult;
 use crate::tokenizer::Tokenizer;
-use alloc::string::String;
 
 /// Characters that can start something in string.
 const MARKERS: [u8; 2] = [b'&', b'\\'];
@@ -76,8 +75,7 @@ pub fn before_data(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Resolve whitespace in string.
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     resolve_whitespace(tokenizer, false, false);
-
-    Ok(None)
+    None
 }
diff --git a/src/construct/text.rs b/src/construct/text.rs
index 2648531..0ea0913 100644
--- a/src/construct/text.rs
+++ b/src/construct/text.rs
@@ -30,7 +30,6 @@ use crate::resolve::Name as ResolveName;
 use crate::state::{Name as StateName, State};
 use crate::subtokenize::Subresult;
 use crate::tokenizer::Tokenizer;
-use alloc::string::String;
 
 /// Characters that can start something in text.
 const MARKERS: [u8; 16] = [
@@ -244,7 +243,7 @@ pub fn before_data(tokenizer: &mut Tokenizer) -> State {
 }
 
 /// Resolve whitespace.
-pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
+pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
     resolve_whitespace(
         tokenizer,
         tokenizer.parse_state.options.constructs.hard_break_trailing,
@@ -260,5 +259,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
         resolve_gfm_autolink_literal(tokenizer);
     }
 
-    Ok(None)
+    None
 }
```
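Apart from the one-character change in `partial_mdx_expression.rs` (passing `kind` by reference as `&kind`), these hunks are mechanical: every construct resolver shown here always returned `Ok(None)`, so the commit drops the unused error channel from the return type and, with it, the now-unneeded `alloc::string::String` imports. A minimal sketch of the before/after shape, using stand-in types rather than the crate's real `Tokenizer` and `Subresult`:

```rust
// Stand-ins only: the real `Tokenizer` and `Subresult` live in markdown-rs
// (`src/tokenizer.rs` and `src/subtokenize.rs`); these placeholders just
// illustrate the signature change made by this commit.
struct Tokenizer;
struct Subresult;

// Old shape: a Result whose error branch these resolvers never used.
fn resolve_old(_tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, String> {
    // ... rewrite events on the tokenizer ...
    Ok(None)
}

// New shape: no error channel, so the body returns `None` directly and
// `alloc::string::String` no longer needs to be imported.
fn resolve_new(_tokenizer: &mut Tokenizer) -> Option<Subresult> {
    // ... rewrite events on the tokenizer ...
    None
}

fn main() {
    let mut tokenizer = Tokenizer;
    assert!(resolve_old(&mut tokenizer).unwrap().is_none());
    assert!(resolve_new(&mut tokenizer).is_none());
}
```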
