Diffstat (limited to 'src/iterators/tokens.rs')
-rw-r--r--  src/iterators/tokens.rs | 38
1 file changed, 33 insertions(+), 5 deletions(-)
diff --git a/src/iterators/tokens.rs b/src/iterators/tokens.rs
index 0d46271..41cbc47 100644
--- a/src/iterators/tokens.rs
+++ b/src/iterators/tokens.rs
@@ -27,19 +27,19 @@ pub struct Tokens<'i, R> {
/// # Safety:
///
/// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
- queue: Rc<Vec<QueueableToken<R>>>,
+ queue: Rc<Vec<QueueableToken<'i, R>>>,
input: &'i str,
start: usize,
end: usize,
}
// TODO(safety): QueueableTokens must be valid indices into input.
-pub fn new<R: RuleType>(
- queue: Rc<Vec<QueueableToken<R>>>,
- input: &str,
+pub fn new<'i, R: RuleType>(
+ queue: Rc<Vec<QueueableToken<'i, R>>>,
+ input: &'i str,
start: usize,
end: usize,
-) -> Tokens<'_, R> {
+) -> Tokens<'i, R> {
if cfg!(debug_assertions) {
for tok in queue.iter() {
match *tok {
@@ -92,6 +92,12 @@ impl<'i, R: RuleType> Tokens<'i, R> {
}
}
+impl<'i, R: RuleType> ExactSizeIterator for Tokens<'i, R> {
+ fn len(&self) -> usize {
+ self.end - self.start
+ }
+}
+
impl<'i, R: RuleType> Iterator for Tokens<'i, R> {
type Item = Token<'i, R>;
@@ -106,6 +112,11 @@ impl<'i, R: RuleType> Iterator for Tokens<'i, R> {
Some(token)
}
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = <Self as ExactSizeIterator>::len(self);
+ (len, Some(len))
+ }
}
impl<'i, R: RuleType> DoubleEndedIterator for Tokens<'i, R> {
@@ -143,4 +154,21 @@ mod tests {
let reverse_tokens = pairs.tokens().rev().collect::<Vec<Token<'_, Rule>>>();
assert_eq!(tokens, reverse_tokens);
}
+
+ #[test]
+ fn exact_size_iter_for_tokens() {
+ let tokens = AbcParser::parse(Rule::a, "abcde").unwrap().tokens();
+ assert_eq!(tokens.len(), tokens.count());
+
+ let tokens = AbcParser::parse(Rule::a, "我很漂亮e").unwrap().tokens();
+ assert_eq!(tokens.len(), tokens.count());
+
+ let tokens = AbcParser::parse(Rule::a, "abcde").unwrap().tokens().rev();
+ assert_eq!(tokens.len(), tokens.count());
+
+ let mut tokens = AbcParser::parse(Rule::a, "abcde").unwrap().tokens();
+ let tokens_len = tokens.len();
+ let _ = tokens.next().unwrap();
+ assert_eq!(tokens.count() + 1, tokens_len);
+ }
}
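
Usage sketch (not part of the diff): with `ExactSizeIterator` in place, the remaining token count of a `Tokens` iterator is available without consuming it, and `size_hint` reports the same exact bounds. The snippet below assumes the pest and pest_derive crates with an illustrative inline grammar; `DemoParser` and the `field` rule are placeholders invented for this example and do not appear in the change above.

use pest::Parser;
use pest_derive::Parser;

// Illustrative one-rule inline grammar; `DemoParser` and `field` exist
// only for this sketch.
#[derive(Parser)]
#[grammar_inline = "field = { ASCII_ALPHA+ }"]
struct DemoParser;

fn main() {
    let pairs = DemoParser::parse(Rule::field, "abc").expect("parse failed");
    let tokens = pairs.tokens();

    // With `ExactSizeIterator` implemented, the remaining token count is
    // known before iterating, and `size_hint` reports the same exact
    // bounds, which lets adapters such as `collect` pre-allocate.
    let expected = tokens.len();
    let collected: Vec<_> = tokens.collect();
    assert_eq!(collected.len(), expected);
}

Because `Tokens` tracks half-open queue indices (`start`, `end`), `end - start` is always the exact number of tokens left to yield, which is what makes the `ExactSizeIterator` contract sound for both forward and reverse iteration.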