···11import modelPrompt from "../model/prompt.txt";
22-import { ChatMessage, Conversation } from "@skyware/bot";
22+import { ChatMessage, Conversation, RichText } from "@skyware/bot";
33import * as c from "../core";
44import * as tools from "../tools";
55import consola from "consola";
···3737 parts: [
3838 {
3939 text: modelPrompt
4040- .replace("{{ handle }}", env.HANDLE),
4040+ .replace("$handle", env.HANDLE),
4141 },
4242 ],
4343 },
···102102 return inference;
103103}
104104105105+function addCitations(
106106+ inference: Awaited<ReturnType<typeof c.ai.models.generateContent>>,
107107+) {
108108+ let originalText = inference.text ?? "";
109109+ if (!inference.candidates) {
110110+ return originalText;
111111+ }
112112+ const supports = inference.candidates[0]?.groundingMetadata
113113+ ?.groundingSupports;
114114+ const chunks = inference.candidates[0]?.groundingMetadata?.groundingChunks;
115115+116116+ const richText = new RichText();
117117+118118+ if (!supports || !chunks || originalText === "") {
119119+ return richText.addText(originalText);
120120+ }
121121+122122+ const sortedSupports = [...supports].sort(
123123+ (a, b) => (b.segment?.endIndex ?? 0) - (a.segment?.endIndex ?? 0),
124124+ );
125125+126126+ let currentText = originalText;
127127+128128+ for (const support of sortedSupports) {
129129+ const endIndex = support.segment?.endIndex;
130130+ if (endIndex === undefined || !support.groundingChunkIndices?.length) {
131131+ continue;
132132+ }
133133+134134+ const citationLinks = support.groundingChunkIndices
135135+ .map((i) => {
136136+ const uri = chunks[i]?.web?.uri;
137137+ if (uri) {
138138+ return { index: i + 1, uri };
139139+ }
140140+ return null;
141141+ })
142142+ .filter(Boolean);
143143+144144+ if (citationLinks.length > 0) {
145145+ richText.addText(currentText.slice(endIndex));
146146+147147+ citationLinks.forEach((citation, idx) => {
148148+ if (citation) {
149149+ richText.addLink(`[${citation.index}]`, citation.uri);
150150+ if (idx < citationLinks.length - 1) {
151151+ richText.addText(", ");
152152+ }
153153+ }
154154+ });
155155+156156+ currentText = currentText.slice(0, endIndex);
157157+ }
158158+ }
159159+160160+ richText.addText(currentText);
161161+162162+ return richText;
163163+}
164164+105165export async function handler(message: ChatMessage): Promise<void> {
106166 const conversation = await message.getConversation();
107167 // ? Conversation should always be able to be found, but just in case:
···162222 }
163223164224 const responseText = inference.text;
225225+ const responseWithCitations = addCitations(inference);
165226166166- if (responseText) {
167167- logger.success("Generated text:", inference.text);
168168- saveMessage(conversation, env.DID, inference.text!);
227227+ if (responseWithCitations) {
228228+ logger.success("Generated text:", responseText);
229229+ saveMessage(conversation, env.DID, responseText!);
169230170170- if (exceedsGraphemes(responseText)) {
171171- multipartResponse(conversation, responseText);
231231+ if (exceedsGraphemes(responseWithCitations)) {
232232+ multipartResponse(conversation, responseWithCitations);
172233 } else {
173234 conversation.sendMessage({
174174- text: responseText,
235235+ text: responseWithCitations,
175236 });
176237 }
177238 }
+1-1
src/model/prompt.txt
···11You are Aero, a neutral and helpful assistant on Bluesky.
22Your job is to give clear, factual, and concise explanations or context about posts users send you.
3344-Handle: {{ handle }}
44+Handle: $handle
5566Guidelines:
77
+33-5
src/utils/conversation.ts
···22 type ChatMessage,
33 type Conversation,
44 graphemeLength,
55+ RichText,
56} from "@skyware/bot";
67import * as yaml from "js-yaml";
78import db from "../db";
···6162 did: user.did,
6263 postUri,
6364 revision: _convo.revision,
6464- text: initialMessage.text,
6565+ text:
6666+ !initialMessage.text ||
6767+ initialMessage.text.trim().length == 0
6868+ ? "Explain this post."
6969+ : initialMessage.text,
6570 });
66716772 return _convo!;
···110115 did: getUserDid(convo).did,
111116 postUri: row.postUri,
112117 revision: row.revision,
113113- text: latestMessage!.text,
118118+ text: postUri &&
119119+ (!latestMessage.text ||
120120+ latestMessage.text.trim().length == 0)
121121+ ? "Explain this post."
122122+ : latestMessage.text,
114123 });
115124 }
116125···196205/*
197206 Response Utilities
198207*/
199199-export function exceedsGraphemes(content: string) {
208208+export function exceedsGraphemes(content: string | RichText) {
209209+ if (content instanceof RichText) {
210210+ return graphemeLength(content.text) > MAX_GRAPHEMES;
211211+ }
200212 return graphemeLength(content) > MAX_GRAPHEMES;
201213}
202214···224236 return chunks.map((chunk, i) => `(${i + 1}/${total}) ${chunk}`);
225237}
226238227227-export async function multipartResponse(convo: Conversation, content: string) {
228228- const parts = splitResponse(content).filter((p) => p.trim().length > 0);
239239+export async function multipartResponse(
240240+ convo: Conversation,
241241+ content: string | RichText,
242242+) {
243243+ let parts: (string | RichText)[];
244244+245245+ if (content instanceof RichText) {
246246+ if (exceedsGraphemes(content)) {
247247+ // If RichText exceeds grapheme limit, convert to plain text for splitting
248248+ parts = splitResponse(content.text);
249249+ } else {
250250+ // Otherwise, send the RichText directly as a single part
251251+ parts = [content];
252252+ }
253253+ } else {
254254+ // If content is a string, behave as before
255255+ parts = splitResponse(content);
256256+ }
229257230258 for (const segment of parts) {
231259 await convo.sendMessage({