···104104 (FORCE_REPLIES) || roll <= chance
105105 }
106106107107- fn extract_final_token(msg: &str) -> Option<Token> {
107107+ fn extract_final_word(msg: &str) -> Option<String> {
108108 msg.split_ascii_whitespace()
109109 .last()
110110- .map(Self::normalize_token)
110110+ .and_then(Self::normalize_token)
111111 }
112112113113- fn random_token(&self, rand: &mut fastrand::Rng) -> Option<Token> {
113113+ fn random_token(&self, rand: &mut fastrand::Rng) -> Option<&Token> {
114114 let len = self.0.len();
115115 if len == 0 {
116116 None
117117 } else {
118118 let i = rand.usize(..len);
119119- self.0.keys().nth(i).cloned()
119119+ self.0.keys().nth(i)
120120 }
121121 }
122122123123 pub fn ingest(&mut self, msg: &str) -> bool {
124124- let mut learned_new_word = false;
125125- // This is a silly way to do windows rust ppl :sob:
126126- let _ = Self::parse(msg)
124124+ // Using reduce instead of .any here to prevent short circuting
125125+ Self::parse(msg)
127126 .map_windows(|[from, to]| {
128127 if let Some(edge) = self.0.get_mut(from) {
129128 edge.increment_token(to);
129129+ false
130130 } else {
131131 let new = Edges(HashMap::from_iter([(to.clone(), 1)]), 1);
132132 self.0.insert(from.clone(), new);
133133- learned_new_word = true;
133133+ true
134134 }
135135 })
136136- .collect::<Vec<_>>();
137137-138138- learned_new_word
136136+ .reduce(|acc, c| acc || c)
137137+ .unwrap_or_default()
139138 }
140139141140 pub fn merge_from(&mut self, other: Self) {
···148147 }
149148 }
150149150150+ fn next_from(&self, tok: &Token, rand: &mut fastrand::Rng, allow_end: bool) -> Option<&Token> {
151151+ // Get the edges for the current token
152152+ // If we have that token, sample its edges
153153+ // Otherwise, if we don't know that token, and allow_end is false, try to pick a random token instead
154154+ self.0
155155+ .get(tok)
156156+ .and_then(|edges| edges.sample(rand, allow_end))
157157+ .or_else(|| {
158158+ if allow_end {
159159+ None
160160+ } else {
161161+ self.random_token(rand)
162162+ }
163163+ })
164164+ }
165165+151166 pub fn respond(
152167 &self,
153168 msg: &str,
···165180 return None;
166181 }
167182168168- // Get our final token, or a random one if the message has nothing, or don't reply at all
169169- // if we have no tokens at all.
170170- let last_token = Self::extract_final_token(msg).or_else(|| self.random_token(&mut rng))?;
171171- let mut current_token = &last_token;
183183+ // Get the final token
184184+ let last_token = Self::extract_final_word(msg);
185185+186186+ let mut current_token = if let Some(t) = last_token {
187187+ // We found a word at the end of the previous message
188188+ &Some(t)
189189+ } else {
190190+ // We couldn't find a word at the end of the last message, pick a random one
191191+ // If we *still* don't have a token, return early
192192+ self.random_token(&mut rng)?
193193+ };
172194173195 let mut chain = Vec::with_capacity(MAX_TOKENS);
174174- let mut has_triggered_typing = false;
175175-176176- while current_token.is_some() && chain.len() <= MAX_TOKENS {
177177- if let Some(edges) = self.0.get(current_token) {
178178- let next = edges.sample(&mut rng, chain.len() > 2);
179196180180- if let Some(ref tok) = next {
181181- if let Some(s) = tok {
182182- // Is this a non-ending token? If so, push it to our chain!
183183- chain.push(s.clone());
184184- if !has_triggered_typing && let Some(typ) = typing_oneshot.take() {
185185- typ.send(true).ok();
186186- }
187187- current_token = tok;
188188- } else {
189189- // If we reached an end token, stop chaining
190190- break;
191191- }
192192- } else {
193193- // If we failed to sample any tokens, we can't continue the chain
194194- break;
195195- }
196196- } else {
197197- // If we don't know the current word, we can't continue the chain
198198- break;
197197+ while let Some(next @ Some(s)) = self.next_from(current_token, &mut rng, !chain.is_empty())
198198+ && chain.len() <= MAX_TOKENS
199199+ {
200200+ chain.push(s.clone());
201201+ if let Some(typ) = typing_oneshot.take() {
202202+ typ.send(true).ok();
199203 }
204204+ current_token = next;
200205 }
201206202207 if let Some(typ) = typing_oneshot.take() {
···308313 }
309314310315 #[test]
311311- fn at_least_2_tokens() {
316316+ fn at_least_1_token() {
312317 let mut brain = Brain::default();
313318 brain.ingest("hello world");
314314- brain.ingest("hello");
315315- brain.ingest("hello");
316316- brain.ingest("hello");
319319+ for _ in 0..100 {
320320+ brain.ingest("hello");
321321+ }
317322318323 for _ in 0..100 {
319324 // I'm too lazy to mock lazyrand LOL!!
···331336 }
#[test]
fn random_on_end() {
    // "hello" only ever appears at the end of the ingested text, so the
    // responder must fall back to a random token instead of giving up.
    let mut brain = Brain::default();
    brain.ingest("world hello");

    assert!(brain.respond("hello", false, false, None).is_some());
}
341346342347 #[test]