author     Titus Wormer <tituswormer@gmail.com>  2022-07-28 16:51:25 +0200
committer  Titus Wormer <tituswormer@gmail.com>  2022-07-28 16:51:25 +0200
commit     6f61649ac8d08fff85a99172afbf4cd852dda2e6
tree       fecb8227527337613c85ba731d7a01f270330a7f /src
parent     f7e5fb852dc9c416b9eeb1f0d4f2d51ba5b68456
Refactor to use `debug_assert`
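
In Rust, `debug_assert!` and its `_eq!`/`_ne!` variants expand to the same
checks as `assert!`, but are compiled in only when `debug_assertions` is
enabled: on by default in dev and test profiles, off in release builds. The
invariant checks converted below therefore still run under `cargo test`,
while release users of the crate pay nothing for them. A minimal standalone
sketch of the difference (illustrative only, not code from this commit):

    fn main() {
        // Checked in every build profile; panics on failure.
        assert!(2 + 2 == 4, "always evaluated");

        // Checked only when `debug_assertions` is on (dev/test profiles);
        // compiled out entirely under `cargo build --release`.
        debug_assert!(2 + 2 == 4, "evaluated in debug builds only");

        // Reports which mode the current binary was compiled in.
        println!("debug assertions enabled: {}", cfg!(debug_assertions));
    }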
Diffstat (limited to 'src')
 src/construct/html_flow.rs |  1 -
 src/content/document.rs    |  2 +-
 src/subtokenize.rs         | 14 +++++++-------
 src/tokenizer.rs           | 20 ++++++++++----------
 src/util/slice.rs          |  2 +-
 5 files changed, 19 insertions(+), 20 deletions(-)
diff --git a/src/construct/html_flow.rs b/src/construct/html_flow.rs
index 238963d..e2b66e5 100644
--- a/src/construct/html_flow.rs
+++ b/src/construct/html_flow.rs
@@ -399,7 +399,6 @@ fn tag_name(tokenizer: &mut Tokenizer, mut info: Info) -> State {
             .serialize()
             .trim()
             .to_lowercase();
-            println!("name: {:?}", name);
 
             if !slash && info.start_tag && HTML_RAW_NAMES.contains(&name.as_str()) {
                 info.kind = Kind::Raw;
diff --git a/src/content/document.rs b/src/content/document.rs
index 2924f6c..935c4ef 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -316,7 +316,7 @@ fn container_new_after(tokenizer: &mut Tokenizer, mut info: DocumentInfo) -> State {
         }
     }
 
-    assert!(found, "expected to find container token to exit");
+    debug_assert!(found, "expected to find container token to exit");
 
     // If we did not continue all existing containers, and there is a new one,
     // close the flow and those containers.
diff --git a/src/subtokenize.rs b/src/subtokenize.rs
index a78f5e2..c641419 100644
--- a/src/subtokenize.rs
+++ b/src/subtokenize.rs
@@ -37,10 +37,10 @@ pub fn link(events: &mut [Event], index: usize) {
 
 /// Link two arbitrary [`Event`][]s together.
 pub fn link_to(events: &mut [Event], pevious: usize, next: usize) {
-    assert_eq!(events[pevious].event_type, EventType::Enter);
-    assert_eq!(events[pevious + 1].event_type, EventType::Exit);
-    assert_eq!(events[pevious + 1].token_type, events[pevious].token_type);
-    assert_eq!(events[next].event_type, EventType::Enter);
+    debug_assert_eq!(events[pevious].event_type, EventType::Enter);
+    debug_assert_eq!(events[pevious + 1].event_type, EventType::Exit);
+    debug_assert_eq!(events[pevious + 1].token_type, events[pevious].token_type);
+    debug_assert_eq!(events[next].event_type, EventType::Enter);
 
     // Note: the exit of this event may not exist, so don’t check for that.
     let link_previous = events[pevious]
@@ -51,7 +51,7 @@ pub fn link_to(events: &mut [Event], pevious: usize, next: usize) {
     let link_next = events[next].link.as_mut().expect("expected `link` on next");
     link_next.previous = Some(pevious);
 
-    assert_eq!(
+    debug_assert_eq!(
         events[pevious].link.as_ref().unwrap().content_type,
         events[next].link.as_ref().unwrap().content_type
     );
@@ -70,7 +70,7 @@ pub fn subtokenize(events: &mut Vec<Event>, parse_state: &ParseState) -> bool {
 
         // Find each first opening chunk.
         if let Some(ref link) = event.link {
-            assert_eq!(event.event_type, EventType::Enter);
+            debug_assert_eq!(event.event_type, EventType::Enter);
 
             // No need to enter linked events again.
             if link.previous == None {
@@ -89,7 +89,7 @@ pub fn subtokenize(events: &mut Vec<Event>, parse_state: &ParseState) -> bool {
             while let Some(index) = link_index {
                 let enter = &events[index];
                 let link_curr = enter.link.as_ref().expect("expected link");
-                assert_eq!(enter.event_type, EventType::Enter);
+                debug_assert_eq!(enter.event_type, EventType::Enter);
 
                 if link_curr.previous != None {
                     tokenizer.define_skip(&enter.point);
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index ec70a2b..3cbad0f 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -298,7 +298,7 @@ impl<'a> Tokenizer<'a> {
 
     /// Prepare for a next code to get consumed.
     pub fn expect(&mut self, char: Option<char>) {
-        assert!(self.consumed, "expected previous character to be consumed");
+        debug_assert!(self.consumed, "expected previous character to be consumed");
         self.consumed = false;
         self.current = char;
     }
@@ -308,7 +308,7 @@ impl<'a> Tokenizer<'a> {
     /// used, or call a next `StateFn`.
     pub fn consume(&mut self) {
         log::debug!("consume: `{:?}` ({:?})", self.current, self.point);
-        assert!(!self.consumed, "expected code to not have been consumed: this might be because `x(code)` instead of `x` was returned");
+        debug_assert!(!self.consumed, "expected code to not have been consumed: this might be because `x(code)` instead of `x` was returned");
 
         self.move_one();
 
@@ -406,7 +406,7 @@ impl<'a> Tokenizer<'a> {
     pub fn exit(&mut self, token_type: Token) {
         let current_token = self.stack.pop().expect("cannot close w/o open tokens");
 
-        assert_eq!(
+        debug_assert_eq!(
             current_token, token_type,
             "expected exit token to match current token"
         );
@@ -414,7 +414,7 @@ impl<'a> Tokenizer<'a> {
         let previous = self.events.last().expect("cannot close w/o open event");
         let mut point = self.point.clone();
 
-        assert!(
+        debug_assert!(
             current_token != previous.token_type
                 || previous.point.index != point.index
                 || previous.point.vs != point.vs,
@@ -422,7 +422,7 @@ impl<'a> Tokenizer<'a> {
         );
 
         if VOID_TOKENS.iter().any(|d| d == &token_type) {
-            assert!(
+            debug_assert!(
                 current_token == previous.token_type,
                 "expected token to be void (`{:?}`), instead of including `{:?}`",
                 current_token,
@@ -471,12 +471,12 @@ impl<'a> Tokenizer<'a> {
         self.previous = previous.previous;
         self.current = previous.current;
         self.point = previous.point;
-        assert!(
+        debug_assert!(
             self.events.len() >= previous.events_len,
             "expected to restore less events than before"
         );
         self.events.truncate(previous.events_len);
-        assert!(
+        debug_assert!(
             self.stack.len() >= previous.stack_len,
             "expected to restore less stack items than before"
         );
@@ -642,8 +642,8 @@ impl<'a> Tokenizer<'a> {
         max: usize,
         start: impl FnOnce(&mut Tokenizer) -> State + 'static,
     ) -> State {
-        assert!(!self.resolved, "cannot feed after drain");
-        assert!(min >= self.point.index, "cannot move backwards");
+        debug_assert!(!self.resolved, "cannot feed after drain");
+        debug_assert!(min >= self.point.index, "cannot move backwards");
 
         // To do: accept `vs`?
         self.move_to((min, 0));
@@ -720,7 +720,7 @@ impl<'a> Tokenizer<'a> {
             }
         }
 
-        assert!(matches!(state, State::Ok), "must be ok");
+        debug_assert!(matches!(state, State::Ok), "must be ok");
 
         if resolve {
             self.resolved = true;
diff --git a/src/util/slice.rs b/src/util/slice.rs
index 2134069..14fd527 100644
--- a/src/util/slice.rs
+++ b/src/util/slice.rs
@@ -22,7 +22,7 @@ impl<'a> Position<'a> {
     /// When `micromark` is used, this function never panics.
     pub fn from_exit_event(events: &'a [Event], index: usize) -> Position<'a> {
         let exit = &events[index];
-        assert_eq!(
+        debug_assert_eq!(
             exit.event_type,
             EventType::Exit,
             "expected `from_exit_event` to be called on `exit` event"
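
One caveat when converting `assert!` to `debug_assert!`: the condition is not
evaluated at all in release builds, so it must be free of side effects. That
holds for every check touched above, since they only read state. A
hypothetical counter-example (not from this repository) showing the pitfall:

    fn pop_and_check(stack: &mut Vec<u32>) -> Option<u32> {
        // Wrong: the `pop()` side effect would vanish in release builds,
        // silently changing behavior between profiles.
        // debug_assert!(stack.pop().is_some());

        // Right: perform the side effect unconditionally, then assert on
        // the stored result.
        let top = stack.pop();
        debug_assert!(top.is_some(), "expected a non-empty stack");
        top
    }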