author     Titus Wormer <tituswormer@gmail.com>   2022-07-07 18:56:06 +0200
committer  Titus Wormer <tituswormer@gmail.com>   2022-07-07 18:56:06 +0200
commit     92b42e06f943338ce8b54b7e22cbb116ff598fa6 (patch)
tree       ff51df093f52dc33bfac5e1c236b41cfbd21c220 /src/content
parent     fdb1f1694f44cfbc59d303a10371300b48d74627 (diff)
Refactor to move token types to `token`
Diffstat (limited to '')
-rw-r--r--   src/content/document.rs   11
-rw-r--r--   src/content/flow.rs       11
2 files changed, 10 insertions, 12 deletions
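
The diff below swaps every use of the old `tokenizer::TokenType` for a `Token` type imported from the new `token` module. As a rough sketch of what that module presumably exposes (only the variants visible in this diff are listed; the derives are an assumption, implied by the `==` comparisons in the changed code):

// Sketch of `src/token.rs` after the refactor (assumed shape, not the full enum).
#[derive(Clone, Debug, PartialEq)]
pub enum Token {
    BlankLineEnding,
    DefinitionLabelString,
    LineEnding,
    // The real module defines many more variants.
}

fn main() {
    // `PartialEq` is what makes checks like
    // `event.token_type == Token::DefinitionLabelString` compile.
    assert_eq!(Token::LineEnding, Token::LineEnding);
    assert_ne!(Token::LineEnding, Token::BlankLineEnding);
}
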
diff --git a/src/content/document.rs b/src/content/document.rs
index dd5038f..b1f3083 100644
--- a/src/content/document.rs
+++ b/src/content/document.rs
@@ -14,9 +14,8 @@ use crate::construct::block_quote::{
use crate::content::flow::start as flow;
use crate::parser::ParseState;
use crate::subtokenize::subtokenize;
-use crate::tokenizer::{
- Code, Event, EventType, Point, State, StateFn, StateFnResult, TokenType, Tokenizer,
-};
+use crate::token::Token;
+use crate::tokenizer::{Code, Event, EventType, Point, State, StateFn, StateFnResult, Tokenizer};
use crate::util::edit_map::EditMap;
use crate::util::{
normalize_identifier::normalize_identifier,
@@ -44,9 +43,7 @@ pub fn document(parse_state: &mut ParseState, point: Point, index: usize) -> Vec
while index < tokenizer.events.len() {
let event = &tokenizer.events[index];
- if event.event_type == EventType::Exit
- && event.token_type == TokenType::DefinitionLabelString
- {
+ if event.event_type == EventType::Exit && event.token_type == Token::DefinitionLabelString {
next_definitions.insert(normalize_identifier(
serialize(
&parse_state.codes,
@@ -409,7 +406,7 @@ fn flow_end(
// To do: blank lines? Other things?
if tokenizer.events.len() > 2
- && tokenizer.events[tokenizer.events.len() - 1].token_type == TokenType::LineEnding
+ && tokenizer.events[tokenizer.events.len() - 1].token_type == Token::LineEnding
{
info.last_line_ending_index = Some(tokenizer.events.len() - 2);
} else {
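
Condensed and lifted out of the diff context, the updated check in `document.rs` follows the pattern below; `Event` and `EventType` here are simplified stand-ins for the real tokenizer types, and the loop body only hints at what the original does (serialize, normalize, and store the definition label):

#[derive(Debug, PartialEq)]
enum EventType { Enter, Exit }

#[derive(Debug, PartialEq)]
enum Token { DefinitionLabelString }

struct Event { event_type: EventType, token_type: Token }

fn main() {
    let events = vec![
        Event { event_type: EventType::Enter, token_type: Token::DefinitionLabelString },
        Event { event_type: EventType::Exit, token_type: Token::DefinitionLabelString },
    ];

    for event in &events {
        // Same shape as the new line in `document.rs`, with `Token` instead of `TokenType`.
        if event.event_type == EventType::Exit && event.token_type == Token::DefinitionLabelString {
            // The real code serializes the label, normalizes it, and inserts it
            // into `next_definitions` here.
            println!("found a definition label");
        }
    }
}
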
diff --git a/src/content/flow.rs b/src/content/flow.rs
index f406685..e52f113 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -26,7 +26,8 @@ use crate::construct::{
html_flow::start as html_flow, paragraph::start as paragraph,
thematic_break::start as thematic_break,
};
-use crate::tokenizer::{Code, State, StateFnResult, TokenType, Tokenizer};
+use crate::token::Token;
+use crate::tokenizer::{Code, State, StateFnResult, Tokenizer};
/// Before flow.
///
@@ -88,9 +89,9 @@ fn blank_line_after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None => (State::Ok, None),
Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
- tokenizer.enter(TokenType::BlankLineEnding);
+ tokenizer.enter(Token::BlankLineEnding);
tokenizer.consume(code);
- tokenizer.exit(TokenType::BlankLineEnding);
+ tokenizer.exit(Token::BlankLineEnding);
// Feel free to interrupt.
tokenizer.interrupt = false;
(State::Fn(Box::new(start)), None)
@@ -112,9 +113,9 @@ fn after(tokenizer: &mut Tokenizer, code: Code) -> StateFnResult {
match code {
Code::None => (State::Ok, None),
Code::CarriageReturnLineFeed | Code::Char('\n' | '\r') => {
- tokenizer.enter(TokenType::LineEnding);
+ tokenizer.enter(Token::LineEnding);
tokenizer.consume(code);
- tokenizer.exit(TokenType::LineEnding);
+ tokenizer.exit(Token::LineEnding);
(State::Fn(Box::new(start)), None)
}
_ => unreachable!("expected eol/eof"),
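
In `flow.rs` the only change is the type passed to the enter/exit calls. A toy stand-in for the tokenizer makes that pattern runnable in isolation; it is a sketch only, since the real `Tokenizer` consumes a `Code` rather than a `char` and does far more than log:

#[derive(Debug)]
enum Token { LineEnding }

// Stub tokenizer: only records what happens, unlike the real one.
struct Tokenizer { log: Vec<String> }

impl Tokenizer {
    fn enter(&mut self, token: Token) { self.log.push(format!("enter {:?}", token)); }
    fn consume(&mut self, code: char) { self.log.push(format!("consume {:?}", code)); }
    fn exit(&mut self, token: Token) { self.log.push(format!("exit {:?}", token)); }
}

fn main() {
    let mut tokenizer = Tokenizer { log: Vec::new() };
    // Mirrors `after` in `flow.rs`: wrap the consumed line ending in a `LineEnding` token.
    tokenizer.enter(Token::LineEnding);
    tokenizer.consume('\n');
    tokenizer.exit(Token::LineEnding);
    println!("{:#?}", tokenizer.log);
}
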