initiative_core/command/token/
token_match_iterator.rs

1use crate::command::prelude::*;
2
/// Maximum nesting depth of a `TokenMatch` tree that the iterator can walk;
/// bounds the fixed-size cursor array used as an explicit traversal stack.
const MAX_DEPTH: usize = 5;
4
/// Iterates over a [`TokenMatch`] tree in pre-order: the root is yielded
/// first, then each child subtree in sequence order.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct TokenMatchIterator<'a, 'b> {
    // Root of the tree being traversed; every yielded item borrows from it.
    root_token_match: &'b TokenMatch<'a>,
    // Path of child indices from the root to the current position, used as a
    // fixed-size explicit stack. Only the first `cursor_depth` entries are
    // meaningful at any time.
    cursor: [usize; MAX_DEPTH],
    // Number of active entries in `cursor`; 0 means the cursor is at the root.
    cursor_depth: usize,
    // Whether the root itself has already been yielded by `next()`.
    has_returned_root: bool,
}
12
13impl<'a, 'b> TokenMatchIterator<'a, 'b> {
14    pub fn new(root_token_match: &'b TokenMatch<'a>) -> Self {
15        TokenMatchIterator {
16            root_token_match,
17            cursor: [0; MAX_DEPTH],
18            cursor_depth: 0,
19            has_returned_root: false,
20        }
21    }
22
23    fn cursor(&self) -> &[usize] {
24        assert!(
25            self.cursor_depth <= MAX_DEPTH,
26            "Token match exceeds maximum recursion depth of {} for TokenMatchIterator\n\n{:?}",
27            MAX_DEPTH,
28            self.root_token_match
29        );
30        &self.cursor[..self.cursor_depth]
31    }
32}
33
impl<'a, 'b> Iterator for TokenMatchIterator<'a, 'b> {
    type Item = &'b TokenMatch<'a>;

    /// Yields matches in pre-order (each parent before its children), walking
    /// the tree iteratively with `cursor` as an explicit stack of child
    /// indices.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            if self.cursor_depth == 0 {
                // Depth 0 addresses the root itself: yield it exactly once.
                if self.has_returned_root {
                    return None;
                } else {
                    self.has_returned_root = true;
                    if has_children(self.root_token_match) {
                        // Descend so subsequent calls visit the root's children.
                        self.cursor_depth += 1;
                    }
                    return Some(self.root_token_match);
                }
            } else if let Some(token_match) = get_token_index(self.root_token_match, self.cursor())
            {
                if has_children(token_match) {
                    // Visit this node's children before moving to its siblings.
                    self.cursor_depth += 1;
                } else {
                    // Leaf: advance to the next sibling at the current depth.
                    self.cursor[self.cursor_depth - 1] += 1;
                }

                return Some(token_match);
            } else {
                // The index at this depth ran past the last child of its
                // parent: pop one level, reset the overrun index, and advance
                // the sibling index one level up. (At depth 1 there is no
                // level above; the loop then falls through to the depth-0
                // termination branch.)
                self.cursor_depth -= 1;
                self.cursor[self.cursor_depth] = 0;
                if let Some(i) = self.cursor_depth.checked_sub(1) {
                    self.cursor[i] += 1;
                }
            }
        }
    }
}
68
69fn has_children(token_match: &TokenMatch<'_>) -> bool {
70    match &token_match.match_meta {
71        MatchMeta::Single(_) => true,
72        MatchMeta::Sequence(v) => !v.is_empty(),
73        MatchMeta::None | MatchMeta::Phrase(_) | MatchMeta::Record(_) => false,
74    }
75}
76
77fn get_token_index<'a, 'b>(
78    token_match: &'b TokenMatch<'a>,
79    cursor: &[usize],
80) -> Option<&'b TokenMatch<'a>> {
81    if cursor.is_empty() {
82        Some(token_match)
83    } else {
84        match &token_match.match_meta {
85            MatchMeta::Sequence(v) => v
86                .get(cursor[0])
87                .and_then(|t| get_token_index(t, &cursor[1..])),
88            MatchMeta::Single(t) if cursor[0] == 0 => get_token_index(t, &cursor[1..]),
89            MatchMeta::None
90            | MatchMeta::Phrase(_)
91            | MatchMeta::Record(_)
92            | MatchMeta::Single(_) => None,
93        }
94    }
95}
96
#[cfg(test)]
mod tests {
    use super::*;

    use crate::storage::{Record, RecordStatus};
    use crate::test_utils as test;

    use futures::StreamExt;

    // A match with no metadata yields only itself.
    #[test]
    fn token_match_iterator_test_no_meta() {
        let token = keyword("badger");
        let token_match = TokenMatch::from(&token);

        assert_eq!(
            vec![&token_match],
            TokenMatchIterator::new(&token_match).collect::<Vec<_>>()
        );
    }

    // Phrase metadata is a leaf: only the match itself is yielded.
    #[test]
    fn token_match_iterator_test_phrase() {
        let token = any_word();
        let token_match = TokenMatch::new(&token, "abc");

        assert_eq!(
            vec![&token_match],
            TokenMatchIterator::new(&token_match).collect::<Vec<_>>()
        );
    }

    // Record metadata is also a leaf, and the iterator stays fused after
    // returning None.
    #[test]
    fn token_match_iterator_test_record() {
        let token = name();
        let token_match = TokenMatch::new(
            &token,
            Record {
                status: RecordStatus::Unsaved,
                thing: test::thing::odysseus(),
            },
        );

        let mut iter = TokenMatchIterator::new(&token_match);

        assert_eq!(Some(&token_match), iter.next());
        assert_eq!(None, iter.next());
        assert_eq!(None, iter.next());
    }

    // Single metadata yields the parent first, then its one child (pre-order).
    #[test]
    fn token_match_iterator_test_single() {
        let child_token = keyword("badger");
        let root_token = optional(child_token.clone());
        let token_match = TokenMatch::new(&root_token, TokenMatch::from(&child_token));

        let mut iter = TokenMatchIterator::new(&token_match);

        assert_eq!(Some(&token_match), iter.next());
        assert_eq!(Some(&TokenMatch::from(&child_token)), iter.next());
        assert_eq!(None, iter.next());
        assert_eq!(None, iter.next());
    }

    // Sequence metadata yields the parent, then each child in sequence order.
    #[test]
    fn token_match_iterator_test_sequence() {
        let child_tokens = [keyword("badger"), keyword("mushroom"), keyword("snake")];
        let root_token = sequence(child_tokens.clone());
        let token_match = TokenMatch::new(
            &root_token,
            child_tokens
                .iter()
                .map(TokenMatch::from)
                .collect::<Vec<_>>(),
        );

        let mut iter = TokenMatchIterator::new(&token_match);

        assert_eq!(Some(&token_match), iter.next());
        assert_eq!(Some(&TokenMatch::from(&child_tokens[0])), iter.next());
        assert_eq!(Some(&TokenMatch::from(&child_tokens[1])), iter.next());
        assert_eq!(Some(&TokenMatch::from(&child_tokens[2])), iter.next());
        assert_eq!(None, iter.next());
        assert_eq!(None, iter.next());
    }

    // Nested Single matches are traversed depth-first: root, child, grandchild.
    #[test]
    fn token_match_iterator_test_recursive() {
        let grandchild_token = any_word();
        let child_token = optional(grandchild_token.clone());
        let root_token = optional(child_token.clone());

        let token_match = TokenMatch::new(
            &root_token,
            TokenMatch::new(&child_token, TokenMatch::from(&grandchild_token)),
        );

        let mut iter = TokenMatchIterator::new(&token_match);

        assert_eq!(Some(&token_match), iter.next());
        assert_eq!(
            Some(&TokenMatch::new(
                &child_token,
                TokenMatch::from(&grandchild_token)
            )),
            iter.next()
        );
        assert_eq!(Some(&TokenMatch::from(&grandchild_token)), iter.next());
        assert_eq!(None, iter.next());
        assert_eq!(None, iter.next());
    }

    // End-to-end: a real match from match_input() over a mixed tree of
    // sequences and optionals is visited in pre-order. The markers 0..=6 are
    // assigned so that pre-order traversal visits them in ascending order.
    #[tokio::test]
    async fn token_match_iterator_test_nested() {
        let token = sequence_m(
            0,
            [
                sequence_m(1, [keyword_m(2, "badger"), keyword_m(3, "mushroom")]),
                optional_m(4, sequence_m(5, [keyword_m(6, "snake")])),
            ],
        );

        let app_meta = test::app_meta();
        let mut stream = token.match_input("badger mushroom snake", &app_meta);
        // Drain the stream until the exact match surfaces; panic if the
        // stream ends without one.
        let token_match = loop {
            match stream.next().await {
                Some(FuzzyMatch::Exact(token_match)) => break token_match,
                Some(_) => {}
                None => panic!(),
            }
        };

        let mut iter = TokenMatchIterator::new(&token_match);

        for i in 0..=6 {
            assert!(iter
                .next()
                .is_some_and(|token_match| token_match.is_marked_with(i)));
        }

        assert_eq!(None, iter.next());
        assert_eq!(None, iter.next());
    }
}