initiative_core/command/token/mod.rs

pub mod constructors;

mod any_of;
mod any_phrase;
mod any_word;
mod keyword;
mod keyword_list;
mod name;
mod optional;
mod or;
mod sequence;
mod token_match_iterator;

use std::hash::{DefaultHasher, Hash, Hasher};

use token_match_iterator::TokenMatchIterator;

use crate::app::AppMeta;
use crate::storage::Record;
use crate::utils::Substr;
use initiative_macros::From;

use futures::prelude::*;

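/// A single token in a command's grammar. `marker` is the hash of a
/// caller-supplied value (see `new_m`) that lets matched tokens be located
/// again via `TokenMatch::find_marker`; 0 means unmarked.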
#[derive(Debug, Eq, PartialEq)]
#[cfg_attr(test, derive(Clone))]
pub struct Token {
    pub token_type: TokenType,
    marker: u64,
}

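/// A token paired with the metadata captured when it matched a piece of
/// input.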
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct TokenMatch<'a> {
    pub token: &'a Token,
    pub match_meta: MatchMeta<'a>,
}

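/// The result of matching a token against user input: the token matched with
/// input left over (`Overflow`), matched the input exactly (`Exact`), or
/// matched only partially, possibly carrying a suggested completion
/// (`Partial`).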
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum FuzzyMatch<'a> {
    Overflow(TokenMatch<'a>, Substr<'a>),
    Exact(TokenMatch<'a>),
    Partial(TokenMatch<'a>, Option<String>),
}

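/// The grammar rule that a `Token` represents. Each variant is matched by the
/// submodule of the same name.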
#[derive(Debug, Eq, PartialEq)]
#[cfg_attr(test, derive(Clone))]
pub enum TokenType {
    /// Matches one or more of the contained tokens, in any order.
    AnyOf(Vec<Token>),

    /// Matches an arbitrary phrase of input.
    AnyPhrase,

    /// Matches an arbitrary single word.
    AnyWord,

    /// Matches one specific keyword.
    Keyword(&'static str),

    /// Matches any one keyword from a list.
    KeywordList(Vec<&'static str>),

    /// Matches the name of a record known to the application.
    Name,

    /// Matches the contained token if it is present.
    Optional(Box<Token>),

    /// Matches any one of the contained tokens.
    Or(Vec<Token>),

    /// Matches the contained tokens in order.
    Sequence(Vec<Token>),
}

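/// Metadata captured by a successful match: nothing, the matched phrase, a
/// matched record, the child matches of a sequence, or a single child match.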
#[derive(Clone, Debug, Eq, From, PartialEq)]
pub enum MatchMeta<'a> {
    None,
    Phrase(&'a str),
    Record(Record),
    Sequence(Vec<TokenMatch<'a>>),
    Single(Box<TokenMatch<'a>>),
}

impl Token {
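    /// Creates an unmarked token of the given type.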
    pub fn new(token_type: TokenType) -> Token {
        Token {
            token_type,
            marker: 0,
        }
    }

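    /// Creates a token with a marker, which can later be located in the match
    /// tree with `TokenMatch::find_marker` or `TokenMatch::contains_marker`.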
    pub fn new_m<M: Hash>(marker: M, token_type: TokenType) -> Token {
        Token {
            token_type,
            marker: hash_marker(marker),
        }
    }

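    /// Lazily produces every fuzzy match of `input` against this token,
    /// dispatching to the submodule that handles this token's type.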
    pub fn match_input<'a, 'b>(
        &'a self,
        input: &'a str,
        app_meta: &'b AppMeta,
    ) -> impl Stream<Item = FuzzyMatch<'a>> + 'b
    where
        'a: 'b,
    {
        match &self.token_type {
            TokenType::AnyOf(..) => any_of::match_input(self, input, app_meta),
            TokenType::AnyPhrase => any_phrase::match_input(self, input),
            TokenType::AnyWord => any_word::match_input(self, input),
            TokenType::Keyword(..) => keyword::match_input(self, input),
            TokenType::KeywordList(..) => keyword_list::match_input(self, input),
            TokenType::Name => name::match_input(self, input, app_meta),
            TokenType::Optional(..) => optional::match_input(self, input, app_meta),
            TokenType::Or(..) => or::match_input(self, input, app_meta),
            TokenType::Sequence(..) => sequence::match_input(self, input, app_meta),
        }
    }

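    /// As `match_input`, but yields only matches that consume the input
    /// exactly.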
    pub fn match_input_exact<'a, 'b>(
        &'a self,
        input: &'a str,
        app_meta: &'b AppMeta,
    ) -> impl Stream<Item = TokenMatch<'a>> + 'b
    where
        'a: 'b,
    {
        self.match_input(input, app_meta)
            .filter_map(|fuzzy_match| future::ready(fuzzy_match.into_exact()))
    }
}

impl<'a> TokenMatch<'a> {
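    /// Pairs a token with match metadata; anything convertible into
    /// `MatchMeta` (a phrase, a `Record`, a `Vec<TokenMatch>`, or a single
    /// `TokenMatch`) is accepted.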
    pub fn new(token: &'a Token, match_meta: impl Into<MatchMeta<'a>>) -> Self {
        TokenMatch {
            token,
            match_meta: match_meta.into(),
        }
    }

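    /// Returns true if this match or any of its descendants is marked with
    /// `marker`.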
    #[cfg_attr(not(any(test, feature = "integration-tests")), expect(dead_code))]
    pub fn contains_marker<M>(&'a self, marker: M) -> bool
    where
        M: Hash,
    {
        self.find_marker(marker).is_some()
    }

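    /// Walks this match and its descendants, returning the first match whose
    /// token is marked with `marker`.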
    pub fn find_marker<M>(&'a self, marker: M) -> Option<&'a TokenMatch<'a>>
    where
        M: Hash,
    {
        TokenMatchIterator::new(self).find(move |token_match| token_match.is_marked_with(&marker))
    }

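    /// Walks this match and its descendants, yielding every match whose token
    /// is marked with any of `markers`.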
    #[cfg_attr(not(any(test, feature = "integration-tests")), expect(dead_code))]
    pub fn find_markers<'b, M>(
        &'a self,
        markers: &'b [M],
    ) -> impl Iterator<Item = &'a TokenMatch<'a>> + 'b
    where
        M: Hash,
        'a: 'b,
    {
        TokenMatchIterator::new(self)
            .filter(move |token_match| markers.iter().any(|m| token_match.is_marked_with(m)))
    }

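    /// Returns true if this token's marker hash equals the hash of `marker`.
    /// Unmarked tokens (marker 0) never match.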
    pub fn is_marked_with<M>(&self, marker: M) -> bool
    where
        M: Hash,
    {
        self.token.marker != 0 && self.token.marker == hash_marker(marker)
    }

    #[cfg_attr(not(feature = "integration-tests"), expect(dead_code))]
    pub fn meta_phrase(&self) -> Option<&str> {
        self.match_meta.phrase()
    }

    #[cfg_attr(not(feature = "integration-tests"), expect(dead_code))]
    pub fn meta_record(&self) -> Option<&Record> {
        self.match_meta.record()
    }

    #[cfg_attr(not(feature = "integration-tests"), expect(dead_code))]
    pub fn meta_sequence(&self) -> Option<&[TokenMatch<'a>]> {
        self.match_meta.sequence()
    }

    #[cfg_attr(not(feature = "integration-tests"), expect(dead_code))]
    pub fn meta_single(&self) -> Option<&TokenMatch<'a>> {
        self.match_meta.single()
    }
}

impl<'a> From<&'a Token> for TokenMatch<'a> {
    fn from(input: &'a Token) -> Self {
        TokenMatch::new(input, MatchMeta::None)
    }
}

impl<'a> FuzzyMatch<'a> {
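    /// Applies `f` to the wrapped `TokenMatch` while preserving the variant
    /// and any overflow or completion it carries.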
    pub fn map<F>(self, f: F) -> Self
    where
        F: FnOnce(TokenMatch<'a>) -> TokenMatch<'a>,
    {
        match self {
            FuzzyMatch::Overflow(token_match, overflow) => {
                FuzzyMatch::Overflow(f(token_match), overflow)
            }
            FuzzyMatch::Exact(token_match) => FuzzyMatch::Exact(f(token_match)),
            FuzzyMatch::Partial(token_match, completion) => {
                FuzzyMatch::Partial(f(token_match), completion)
            }
        }
    }

    #[cfg_attr(not(feature = "integration-tests"), expect(dead_code))]
    pub fn token_match(&self) -> &TokenMatch<'a> {
        match self {
            FuzzyMatch::Overflow(token_match, _)
            | FuzzyMatch::Exact(token_match)
            | FuzzyMatch::Partial(token_match, _) => token_match,
        }
    }

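    /// Returns the wrapped `TokenMatch` only if the match was exact.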
    pub fn into_exact(self) -> Option<TokenMatch<'a>> {
        if let FuzzyMatch::Exact(token_match) = self {
            Some(token_match)
        } else {
            None
        }
    }
}

impl<'a> MatchMeta<'a> {
    pub fn phrase(&self) -> Option<&str> {
        if let MatchMeta::Phrase(s) = self {
            Some(s)
        } else {
            None
        }
    }

    pub fn record(&self) -> Option<&Record> {
        if let MatchMeta::Record(r) = self {
            Some(r)
        } else {
            None
        }
    }

    #[cfg_attr(not(feature = "integration-tests"), expect(dead_code))]
    pub fn into_record(self) -> Option<Record> {
        if let MatchMeta::Record(r) = self {
            Some(r)
        } else {
            None
        }
    }

    pub fn sequence(&self) -> Option<&[TokenMatch<'a>]> {
        if let MatchMeta::Sequence(v) = self {
            Some(v.as_slice())
        } else {
            None
        }
    }

    pub fn into_sequence(self) -> Option<Vec<TokenMatch<'a>>> {
        if let MatchMeta::Sequence(v) = self {
            Some(v)
        } else {
            None
        }
    }

    pub fn single(&self) -> Option<&TokenMatch<'a>> {
        if let MatchMeta::Single(b) = self {
            Some(b.as_ref())
        } else {
            None
        }
    }
}

impl<'a> From<TokenMatch<'a>> for MatchMeta<'a> {
    fn from(input: TokenMatch<'a>) -> MatchMeta<'a> {
        Box::new(input).into()
    }
}

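/// Hashes a marker value with the standard library's default hasher so that
/// markers of any `Hash` type can be stored and compared as plain `u64`s.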
fn hash_marker<M: Hash>(marker: M) -> u64 {
    let mut hasher = DefaultHasher::default();
    marker.hash(&mut hasher);
    hasher.finish()
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::command::prelude::*;
    use crate::test_utils as test;

    #[derive(Hash)]
    enum Marker {
        One,
        Two,
        Three,
    }

    #[test]
    fn token_match_find_marker_contains_marker_test() {
        let keyword_token = keyword_m(Marker::Two, "badger");
        let sequence_token = sequence_m(Marker::One, [keyword_token.clone()]);

        let token_match = TokenMatch::new(&sequence_token, vec![TokenMatch::from(&keyword_token)]);

        assert_eq!(Some(&token_match), token_match.find_marker(Marker::One));
        assert_eq!(
            Some(&TokenMatch::from(&keyword_token)),
            token_match.find_marker(Marker::Two),
        );
        assert_eq!(None, token_match.find_marker(Marker::Three));

        assert!(token_match.contains_marker(Marker::One));
        assert!(token_match.contains_marker(Marker::Two));
        assert!(!token_match.contains_marker(Marker::Three));
    }

    #[test]
    fn token_match_find_markers_test() {
        let tokens = [
            keyword_m(Marker::One, "badger"),
            keyword_m(Marker::Two, "mushroom"),
            keyword_m(Marker::Three, "snake"),
        ];
        let sequence_token = sequence_m(Marker::One, tokens.clone());
        let token_match = TokenMatch::new(
            &sequence_token,
            tokens.iter().map(TokenMatch::from).collect::<Vec<_>>(),
        );

        assert_eq!(
            vec![
                &token_match,
                &TokenMatch::from(&tokens[0]),
                &TokenMatch::from(&tokens[1]),
            ],
            token_match
                .find_markers(&[Marker::One, Marker::Two])
                .collect::<Vec<_>>(),
        );

        assert_eq!(
            vec![&TokenMatch::from(&tokens[2])],
            token_match
                .find_markers(&[Marker::Three])
                .collect::<Vec<_>>(),
        );
    }

    #[test]
    fn token_match_is_marked_with_test() {
        let marked_token = keyword_m(Marker::One, "badger");
        let unmarked_token = keyword("mushroom");

        assert!(TokenMatch::from(&marked_token).is_marked_with(&Marker::One));
        assert!(!TokenMatch::from(&marked_token).is_marked_with(&Marker::Two));
        assert!(!TokenMatch::from(&unmarked_token).is_marked_with(&Marker::One));
    }

    #[tokio::test]
    async fn token_match_new_test() {
        let token = keyword("I am a token");
        let record = test::app_meta::with_test_data()
            .await
            .repository
            .get_by_uuid(&test::npc::odysseus::UUID)
            .await
            .unwrap();

        let token_match = TokenMatch::from(&token);
        assert_eq!(MatchMeta::None, token_match.match_meta);

        let token_match = TokenMatch::new(&token, "word");
        assert_eq!(MatchMeta::Phrase("word"), token_match.match_meta);

        let token_match = TokenMatch::new(&token, record);
        assert!(matches!(token_match.match_meta, MatchMeta::Record(_)));

        let token_match = TokenMatch::new(&token, vec![TokenMatch::from(&token)]);
        assert!(matches!(token_match.match_meta, MatchMeta::Sequence(_)));

        let token_match = TokenMatch::new(&token, TokenMatch::from(&token));
        assert!(matches!(token_match.match_meta, MatchMeta::Single(_)));
    }
}