···1import modelPrompt from "../model/prompt.txt";
2-import { ChatMessage, Conversation } from "@skyware/bot";
3import * as c from "../core";
4import * as tools from "../tools";
5import consola from "consola";
···37 parts: [
38 {
39 text: modelPrompt
40- .replace("{{ handle }}", env.HANDLE),
41 },
42 ],
43 },
···102 return inference;
103}
104000000000000000000000000000000000000000000000000000000000000105export async function handler(message: ChatMessage): Promise<void> {
106 const conversation = await message.getConversation();
107 // ? Conversation should always be able to be found, but just in case:
···162 }
163164 const responseText = inference.text;
0165166- if (responseText) {
167- logger.success("Generated text:", inference.text);
168- saveMessage(conversation, env.DID, inference.text!);
169170- if (exceedsGraphemes(responseText)) {
171- multipartResponse(conversation, responseText);
172 } else {
173 conversation.sendMessage({
174- text: responseText,
175 });
176 }
177 }
···1import modelPrompt from "../model/prompt.txt";
2+import { ChatMessage, Conversation, RichText } from "@skyware/bot";
3import * as c from "../core";
4import * as tools from "../tools";
5import consola from "consola";
···37 parts: [
38 {
39 text: modelPrompt
40+ .replace("$handle", env.HANDLE),
41 },
42 ],
43 },
···102 return inference;
103}
104105+function addCitations(
106+ inference: Awaited<ReturnType<typeof c.ai.models.generateContent>>,
107+) {
108+ let originalText = inference.text ?? "";
109+ if (!inference.candidates) {
110+ return originalText;
111+ }
112+ const supports = inference.candidates[0]?.groundingMetadata
113+ ?.groundingSupports;
114+ const chunks = inference.candidates[0]?.groundingMetadata?.groundingChunks;
115+116+ const richText = new RichText();
117+118+ if (!supports || !chunks || originalText === "") {
119+ return richText.addText(originalText);
120+ }
121+122+ const sortedSupports = [...supports].sort(
123+ (a, b) => (b.segment?.endIndex ?? 0) - (a.segment?.endIndex ?? 0),
124+ );
125+126+ let currentText = originalText;
127+128+ for (const support of sortedSupports) {
129+ const endIndex = support.segment?.endIndex;
130+ if (endIndex === undefined || !support.groundingChunkIndices?.length) {
131+ continue;
132+ }
133+134+ const citationLinks = support.groundingChunkIndices
135+ .map((i) => {
136+ const uri = chunks[i]?.web?.uri;
137+ if (uri) {
138+ return { index: i + 1, uri };
139+ }
140+ return null;
141+ })
142+ .filter(Boolean);
143+144+ if (citationLinks.length > 0) {
145+ richText.addText(currentText.slice(endIndex));
146+147+ citationLinks.forEach((citation, idx) => {
148+ if (citation) {
149+ richText.addLink(`[${citation.index}]`, citation.uri);
150+ if (idx < citationLinks.length - 1) {
151+ richText.addText(", ");
152+ }
153+ }
154+ });
155+156+ currentText = currentText.slice(0, endIndex);
157+ }
158+ }
159+160+ richText.addText(currentText);
161+162+ return richText;
163+}
164+165export async function handler(message: ChatMessage): Promise<void> {
166 const conversation = await message.getConversation();
167 // ? Conversation should always be able to be found, but just in case:
···222 }
223224 const responseText = inference.text;
225+ const responseWithCitations = addCitations(inference);
226227+ if (responseWithCitations) {
228+ logger.success("Generated text:", responseText);
229+ saveMessage(conversation, env.DID, responseText!);
230231+ if (exceedsGraphemes(responseWithCitations)) {
232+ multipartResponse(conversation, responseWithCitations);
233 } else {
234 conversation.sendMessage({
235+ text: responseWithCitations,
236 });
237 }
238 }
+1-1
src/model/prompt.txt
···1You are Aero, a neutral and helpful assistant on Bluesky.
2Your job is to give clear, factual, and concise explanations or context about posts users send you.
34-Handle: {{ handle }}
56Guidelines:
7
···1You are Aero, a neutral and helpful assistant on Bluesky.
2Your job is to give clear, factual, and concise explanations or context about posts users send you.
34+Handle: $handle
56Guidelines:
7
+33-5
src/utils/conversation.ts
···2 type ChatMessage,
3 type Conversation,
4 graphemeLength,
05} from "@skyware/bot";
6import * as yaml from "js-yaml";
7import db from "../db";
···61 did: user.did,
62 postUri,
63 revision: _convo.revision,
64- text: initialMessage.text,
000065 });
6667 return _convo!;
···110 did: getUserDid(convo).did,
111 postUri: row.postUri,
112 revision: row.revision,
113- text: latestMessage!.text,
0000114 });
115 }
116···196/*
197 Response Utilities
198*/
199-export function exceedsGraphemes(content: string) {
000200 return graphemeLength(content) > MAX_GRAPHEMES;
201}
202···224 return chunks.map((chunk, i) => `(${i + 1}/${total}) ${chunk}`);
225}
226227-export async function multipartResponse(convo: Conversation, content: string) {
228- const parts = splitResponse(content).filter((p) => p.trim().length > 0);
0000000000000000229230 for (const segment of parts) {
231 await convo.sendMessage({
···2 type ChatMessage,
3 type Conversation,
4 graphemeLength,
5+ RichText,
6} from "@skyware/bot";
7import * as yaml from "js-yaml";
8import db from "../db";
···62 did: user.did,
63 postUri,
64 revision: _convo.revision,
65+ text:
66+ !initialMessage.text ||
67+ initialMessage.text.trim().length == 0
68+ ? "Explain this post."
69+ : initialMessage.text,
70 });
7172 return _convo!;
···115 did: getUserDid(convo).did,
116 postUri: row.postUri,
117 revision: row.revision,
118+ text: postUri &&
119+ (!latestMessage.text ||
120+ latestMessage.text.trim().length == 0)
121+ ? "Explain this post."
122+ : latestMessage.text,
123 });
124 }
125···205/*
206 Response Utilities
207*/
208+export function exceedsGraphemes(content: string | RichText) {
209+ if (content instanceof RichText) {
210+ return graphemeLength(content.text) > MAX_GRAPHEMES;
211+ }
212 return graphemeLength(content) > MAX_GRAPHEMES;
213}
214···236 return chunks.map((chunk, i) => `(${i + 1}/${total}) ${chunk}`);
237}
238239+export async function multipartResponse(
240+ convo: Conversation,
241+ content: string | RichText,
242+) {
243+ let parts: (string | RichText)[];
244+245+ if (content instanceof RichText) {
246+ if (exceedsGraphemes(content)) {
247+ // If RichText exceeds grapheme limit, convert to plain text for splitting
248+ parts = splitResponse(content.text);
249+ } else {
250+ // Otherwise, send the RichText directly as a single part
251+ parts = [content];
252+ }
253+ } else {
254+ // If content is a string, behave as before
255+ parts = splitResponse(content);
256+ }
257258 for (const segment of parts) {
259 await convo.sendMessage({