Diffstat:
 src/content/flow.rs   |  2 +-
 src/content/string.rs |  2 +-
 src/parser.rs         |  2 +-
 src/tokenizer.rs      | 10 +++++-----
 4 files changed, 8 insertions(+), 8 deletions(-)
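
The change is mechanical: `flow`, `string`, and `Tokenizer::feed` now take a borrowed `&[Code]` instead of an owned `Vec<Code>`, so `parse` can drop its `codes.clone()`, and the unconsumed remainder is produced with `codes[index..].to_vec()` rather than `Vec::split_off`. The standalone sketch below (simplified `Code` type and a hypothetical `feed` signature, not code from this repository) illustrates the same ownership pattern.

    #[derive(Debug, Clone, PartialEq)]
    enum Code {
        Char(char),
        Eof,
    }

    // Before: `fn feed(codes: Vec<Code>, ...)` forced callers to clone.
    // After: a slice borrow is enough; only the unconsumed tail is copied out.
    fn feed(codes: &[Code], index: usize) -> Option<Vec<Code>> {
        if index < codes.len() {
            // Equivalent of the old `codes.split_off(index)`, but without
            // needing to own (and therefore clone) the whole `Vec`.
            Some(codes[index..].to_vec())
        } else {
            None
        }
    }

    fn main() {
        let codes = vec![Code::Char('a'), Code::Char('b'), Code::Eof];
        // The caller keeps ownership; no `codes.clone()` at the call site.
        let remainder = feed(&codes, 1);
        assert_eq!(remainder, Some(vec![Code::Char('b'), Code::Eof]));
        println!("consumed {:?}, remainder {:?}", &codes[..1], remainder);
    }

The trade-off is that the caller retains ownership of the full code list, and only the remainder that must outlive the borrow is copied.
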
diff --git a/src/content/flow.rs b/src/content/flow.rs
index ac987e1..693ffb5 100644
--- a/src/content/flow.rs
+++ b/src/content/flow.rs
@@ -32,7 +32,7 @@ use crate::util::get_span;
/// Turn `codes` as the flow content type into events.
// To do: remove this `allow` when all the content types are glued together.
#[allow(dead_code)]
-pub fn flow(codes: Vec<Code>) -> Vec<Event> {
+pub fn flow(codes: &[Code]) -> Vec<Event> {
let mut tokenizer = Tokenizer::new();
let (state, remainder) = tokenizer.feed(codes, Box::new(start), true);
diff --git a/src/content/string.rs b/src/content/string.rs
index a8a81b2..1239a36 100644
--- a/src/content/string.rs
+++ b/src/content/string.rs
@@ -18,7 +18,7 @@ use crate::tokenizer::{Code, Event, State, StateFnResult, TokenType, Tokenizer};
/// Turn `codes` as the string content type into events.
// To do: remove this `allow` when all the content types are glued together.
#[allow(dead_code)]
-pub fn string(codes: Vec<Code>) -> Vec<Event> {
+pub fn string(codes: &[Code]) -> Vec<Event> {
let mut tokenizer = Tokenizer::new();
let (state, remainder) = tokenizer.feed(codes, Box::new(before), true);
diff --git a/src/parser.rs b/src/parser.rs
index 10c6e7a..e156e33 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -9,6 +9,6 @@ use crate::tokenizer::{as_codes, Code, Event};
pub fn parse(value: &str) -> (Vec<Event>, Vec<Code>) {
let codes = as_codes(value);
// To do: pass a reference to this around, and slices in the (back)feeding. Might be tough.
- let events = flow(codes.clone());
+ let events = flow(&codes);
(events, codes)
}
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 4239520..faee8d9 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -334,7 +334,7 @@ impl Tokenizer {
tokenizer.point
);
let result = done(ok);
- tokenizer.feed(codes, result, false)
+ tokenizer.feed(&codes, result, false)
},
)
}
@@ -373,7 +373,7 @@ impl Tokenizer {
tokenizer.point
);
let result = done(ok);
- tokenizer.feed(codes, result, false)
+ tokenizer.feed(&codes, result, false)
},
)
}
@@ -440,11 +440,11 @@ impl Tokenizer {
/// When `done: true` is passed, the EOF is fed.
pub fn feed(
&mut self,
- codes: Vec<Code>,
+ codes: &[Code],
start: impl FnOnce(&mut Tokenizer, Code) -> StateFnResult + 'static,
drain: bool,
) -> StateFnResult {
- let mut codes = codes;
+ let codes = codes;
let mut state = State::Fn(Box::new(start));
let mut index = 0;
@@ -474,7 +474,7 @@ impl Tokenizer {
// Yield to a higher loop if we shouldn’t feed EOFs.
if !drain {
- return (state, Some(codes.split_off(index)));
+ return (state, Some(codes[index..].to_vec()));
}
loop {