diff options
author | Titus Wormer <tituswormer@gmail.com> | 2022-07-29 10:49:07 +0200 |
---|---|---|
committer | Titus Wormer <tituswormer@gmail.com> | 2022-07-29 10:49:07 +0200 |
commit | 148ede7f0f42f0ccb1620b13d91f35d0c7d04c2f (patch) | |
tree | 7655ffebe0c6a917c3c391edacde03d754f2de4f /src/construct/list.rs | |
parent | 6f61649ac8d08fff85a99172afbf4cd852dda2e6 (diff) | |
download | markdown-rs-148ede7f0f42f0ccb1620b13d91f35d0c7d04c2f.tar.gz markdown-rs-148ede7f0f42f0ccb1620b13d91f35d0c7d04c2f.tar.bz2 markdown-rs-148ede7f0f42f0ccb1620b13d91f35d0c7d04c2f.zip |
Refactor to work on bytes (`u8`)
Diffstat (limited to 'src/construct/list.rs')
-rw-r--r-- | src/construct/list.rs | 45 |
1 file changed, 21 insertions(+), 24 deletions(-)
diff --git a/src/construct/list.rs b/src/construct/list.rs index 355eeee..9b59130 100644 --- a/src/construct/list.rs +++ b/src/construct/list.rs @@ -102,19 +102,19 @@ enum Kind { } impl Kind { - /// Turn a [char] into a kind. + /// Turn a byte ([u8]) into a kind. /// /// ## Panics /// - /// Panics if `char` is not `.`, `)`, `*`, `+`, or `-`. - fn from_char(char: char) -> Kind { - match char { - '.' => Kind::Dot, - ')' => Kind::Paren, - '*' => Kind::Asterisk, - '+' => Kind::Plus, - '-' => Kind::Dash, - _ => unreachable!("invalid char"), + /// Panics if `byte` is not `.`, `)`, `*`, `+`, or `-`. + fn from_byte(byte: u8) -> Kind { + match byte { + b'.' => Kind::Dot, + b')' => Kind::Paren, + b'*' => Kind::Asterisk, + b'+' => Kind::Plus, + b'-' => Kind::Dash, + _ => unreachable!("invalid byte"), } } } @@ -149,11 +149,11 @@ pub fn start(tokenizer: &mut Tokenizer) -> State { fn before(tokenizer: &mut Tokenizer) -> State { match tokenizer.current { // Unordered. - Some('*' | '+' | '-') => tokenizer.check(thematic_break, |ok| { + Some(b'*' | b'+' | b'-') => tokenizer.check(thematic_break, |ok| { Box::new(if ok { nok } else { before_unordered }) })(tokenizer), // Ordered. - Some(char) if char.is_ascii_digit() && (!tokenizer.interrupt || char == '1') => { + Some(byte) if byte.is_ascii_digit() && (!tokenizer.interrupt || byte == b'1') => { tokenizer.enter(Token::ListItemPrefix); tokenizer.enter(Token::ListItemValue); inside(tokenizer, 0) @@ -183,11 +183,11 @@ fn before_unordered(tokenizer: &mut Tokenizer) -> State { /// ``` fn inside(tokenizer: &mut Tokenizer, size: usize) -> State { match tokenizer.current { - Some(char) if char.is_ascii_digit() && size + 1 < LIST_ITEM_VALUE_SIZE_MAX => { + Some(byte) if byte.is_ascii_digit() && size + 1 < LIST_ITEM_VALUE_SIZE_MAX => { tokenizer.consume(); State::Fn(Box::new(move |t| inside(t, size + 1))) } - Some('.' | ')') if !tokenizer.interrupt || size < 2 => { + Some(b'.' | b')') if !tokenizer.interrupt || size < 2 => { tokenizer.exit(Token::ListItemValue); marker(tokenizer) } @@ -262,7 +262,7 @@ fn whitespace(tokenizer: &mut Tokenizer) -> State { /// ^ /// ``` fn whitespace_after(tokenizer: &mut Tokenizer) -> State { - if matches!(tokenizer.current, Some('\t' | ' ')) { + if matches!(tokenizer.current, Some(b'\t' | b' ')) { State::Nok } else { State::Ok @@ -277,7 +277,7 @@ fn whitespace_after(tokenizer: &mut Tokenizer) -> State { /// ``` fn prefix_other(tokenizer: &mut Tokenizer) -> State { match tokenizer.current { - Some('\t' | ' ') => { + Some(b'\t' | b' ') => { tokenizer.enter(Token::SpaceOrTab); tokenizer.consume(); tokenizer.exit(Token::SpaceOrTab); @@ -303,7 +303,7 @@ fn after(tokenizer: &mut Tokenizer, blank: bool) -> State { &[Token::ListItem], ); let mut prefix = Slice::from_position( - &tokenizer.parse_state.chars, + tokenizer.parse_state.bytes, &Position { start: &tokenizer.events[start].point, end: &tokenizer.point, @@ -400,13 +400,10 @@ pub fn resolve_list_item(tokenizer: &mut Tokenizer) { if event.event_type == EventType::Enter { let end = skip::opt(&tokenizer.events, index, &[Token::ListItem]) - 1; let marker = skip::to(&tokenizer.events, index, &[Token::ListItemMarker]); - let kind = Kind::from_char( - Slice::from_point( - &tokenizer.parse_state.chars, - &tokenizer.events[marker].point, - ) - .head() - .unwrap(), + let kind = Kind::from_byte( + Slice::from_point(tokenizer.parse_state.bytes, &tokenizer.events[marker].point) .head() .unwrap(), ); let current = (kind, balance, index, end); |