···11+#!/usr/bin/env python3
22+"""
33+Add block management tools to the main void agent so it can also manage user blocks.
44+"""
55+66+import os
77+import logging
88+from letta_client import Letta
99+from create_profile_researcher import create_block_management_tools
# Configure logging
# (basicConfig touches the root logger; fine for a one-shot admin script)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("add_block_tools")
def add_block_tools_to_void():
    """Create the user-block management tools and attach any the void agent is missing."""
    # Connect to Letta with the API key from the environment.
    client = Letta(token=os.environ["LETTA_API_KEY"])

    logger.info("Adding block management tools to void agent...")

    # Build (upsert) the three block-management tools.
    attach_tool, detach_tool, update_tool = create_block_management_tools(client)

    # Locate the void agent; bail out if it does not exist yet.
    matches = client.agents.list(name="void")
    if not matches:
        print("❌ Void agent not found")
        return
    agent = matches[0]

    # Names of tools already attached to the agent.
    attached = {t.name for t in client.agents.tools.list(agent_id=agent.id)}

    candidates = [
        (attach_tool, "attach_user_block"),
        (detach_tool, "detach_user_block"),
        (update_tool, "update_user_block"),
    ]

    # Attach only the tools that are not already present.
    added = []
    for tool, name in candidates:
        if name in attached:
            logger.info(f"Tool {name} already attached to void agent")
            continue
        client.agents.tools.attach(agent_id=agent.id, tool_id=tool.id)
        added.append(name)
        logger.info(f"Added tool {name} to void agent")

    if added:
        print(f"✅ Added {len(added)} block management tools to void agent:")
        for tool_name in added:
            print(f" - {tool_name}")
    else:
        print("✅ All block management tools already present on void agent")

    print(f"\nVoid agent can now:")
    print(f" - attach_user_block: Create and attach user memory blocks")
    print(f" - update_user_block: Update user memory with new information")
    print(f" - detach_user_block: Clean up memory when done with user")
def main():
    """Entry point: run the attachment routine, logging and printing any failure."""
    try:
        add_block_tools_to_void()
    except Exception as exc:
        logger.error(f"Error: {exc}")
        print(f"❌ Error: {exc}")
# Script entry point.
if __name__ == "__main__":
    main()
+166
add_feed_tool_to_void.py
···11+#!/usr/bin/env python3
22+"""
33+Add Bluesky feed retrieval tool to the main void agent.
44+"""
55+66+import os
77+import logging
88+from letta_client import Letta
# Configure logging
# (basicConfig touches the root logger; fine for a one-shot admin script)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("add_feed_tool")
def create_feed_tool(client: Letta):
    """Create (upsert) the Bluesky feed retrieval tool and return the Letta tool handle.

    The nested function is uploaded by source to Letta, so it must stay fully
    self-contained: it does its own imports and closes over no outer names.
    """

    def get_bluesky_feed(feed_uri: str = None, max_posts: int = 25) -> str:
        """
        Retrieve a Bluesky feed. If no feed_uri provided, gets the authenticated user's home timeline.

        Args:
            feed_uri: The AT-URI of the feed to retrieve (optional - defaults to home timeline)
            max_posts: Maximum number of posts to return (default: 25, max: 100)

        Returns:
            YAML-formatted feed data with posts and metadata
        """
        import os
        import requests
        import yaml
        from datetime import datetime

        try:
            # Credentials come from the tool's runtime environment.
            username = os.getenv("BSKY_USERNAME")
            password = os.getenv("BSKY_PASSWORD")
            pds_host = os.getenv("PDS_URI", "https://bsky.social")

            if not username or not password:
                return "Error: BSKY_USERNAME and BSKY_PASSWORD environment variables must be set"

            # Create an authenticated session with the PDS.
            session_url = f"{pds_host}/xrpc/com.atproto.server.createSession"
            session_data = {
                "identifier": username,
                "password": password
            }

            try:
                session_response = requests.post(session_url, json=session_data, timeout=10)
                session_response.raise_for_status()
                session = session_response.json()
                access_token = session.get("accessJwt")

                if not access_token:
                    return "Error: Failed to get access token from session"
            except Exception as e:
                return f"Error: Authentication failed. ({str(e)})"

            # Build feed parameters; the API caps page size at 100.
            params = {
                "limit": min(max_posts, 100)
            }

            # Determine which endpoint to use
            if feed_uri:
                # Use getFeed for custom feeds
                feed_url = f"{pds_host}/xrpc/app.bsky.feed.getFeed"
                params["feed"] = feed_uri
                feed_type = "custom_feed"
            else:
                # Use getTimeline for home feed
                feed_url = f"{pds_host}/xrpc/app.bsky.feed.getTimeline"
                feed_type = "home_timeline"

            # Make authenticated feed request
            try:
                headers = {"Authorization": f"Bearer {access_token}"}
                feed_response = requests.get(feed_url, params=params, headers=headers, timeout=10)
                feed_response.raise_for_status()
                feed_data = feed_response.json()
            except Exception as e:
                feed_identifier = feed_uri if feed_uri else "home timeline"
                return f"Error: Failed to retrieve feed '{feed_identifier}'. ({str(e)})"

            # Wrap the raw API payload with request metadata for the agent.
            results_data = {
                "feed_data": {
                    "feed_type": feed_type,
                    "feed_uri": feed_uri if feed_uri else "home_timeline",
                    # NOTE(review): naive local time — confirm UTC is not expected here.
                    "timestamp": datetime.now().isoformat(),
                    "parameters": {
                        "max_posts": max_posts,
                        "user": username
                    },
                    "results": feed_data
                }
            }

            # Convert to YAML directly without field stripping complications
            # This avoids the JSON parsing errors we had before
            return yaml.dump(results_data, default_flow_style=False, allow_unicode=True)

        except Exception as e:
            error_msg = f"Error retrieving feed: {str(e)}"
            return error_msg

    # Create the tool using upsert (idempotent: updates the tool if it exists).
    # Fix: dropped the unused `import json` from the nested function body.
    tool = client.tools.upsert_from_function(
        func=get_bluesky_feed,
        tags=["bluesky", "feed", "timeline"]
    )

    logger.info(f"Created tool: {tool.name} (ID: {tool.id})")
    return tool
def add_feed_tool_to_void():
    """Upsert the feed tool and attach it to the void agent if it is missing."""
    client = Letta(token=os.environ["LETTA_API_KEY"])

    logger.info("Adding feed tool to void agent...")

    # Build (upsert) the feed tool.
    feed_tool = create_feed_tool(client)

    # Locate the void agent; bail out if it does not exist yet.
    matches = client.agents.list(name="void")
    if not matches:
        print("❌ Void agent not found")
        return
    agent = matches[0]

    attached_names = {t.name for t in client.agents.tools.list(agent_id=agent.id)}

    # Already attached? Report and stop.
    if feed_tool.name in attached_names:
        logger.info(f"Tool {feed_tool.name} already attached to void agent")
        print(f"✅ Feed tool already present on void agent")
        return

    client.agents.tools.attach(agent_id=agent.id, tool_id=feed_tool.id)
    logger.info(f"Added {feed_tool.name} to void agent")
    print(f"✅ Added get_bluesky_feed tool to void agent!")
    print(f"\nVoid agent can now retrieve Bluesky feeds:")
    print(f" - Home timeline: 'Show me my home feed'")
    print(f" - Custom feed: 'Get posts from at://did:plc:xxx/app.bsky.feed.generator/xxx'")
    print(f" - Limited posts: 'Show me the latest 10 posts from my timeline'")
def main():
    """Entry point: run the attachment routine, logging and printing any failure."""
    try:
        add_feed_tool_to_void()
    except Exception as exc:
        logger.error(f"Error: {exc}")
        print(f"❌ Error: {exc}")
# Script entry point.
if __name__ == "__main__":
    main()
+214
add_posting_tool_to_void.py
···11+#!/usr/bin/env python3
22+"""
33+Add Bluesky posting tool to the main void agent.
44+"""
55+66+import os
77+import logging
88+from letta_client import Letta
# Configure logging
# (basicConfig touches the root logger; fine for a one-shot admin script)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("add_posting_tool")
def create_posting_tool(client: Letta):
    """Create (upsert) the Bluesky posting tool and return the Letta tool handle.

    The nested function is uploaded by source to Letta, so it must stay fully
    self-contained: it does its own imports and closes over no outer names.
    """

    def post_to_bluesky(text: str) -> str:
        """
        Post a message to Bluesky.

        Args:
            text: The text content of the post (required)

        Returns:
            Status message with the post URI if successful, error message if failed
        """
        import os
        import requests
        import re
        from datetime import datetime, timezone

        try:
            # Get credentials from environment
            username = os.getenv("BSKY_USERNAME")
            password = os.getenv("BSKY_PASSWORD")
            pds_host = os.getenv("PDS_URI", "https://bsky.social")

            if not username or not password:
                return "Error: BSKY_USERNAME and BSKY_PASSWORD environment variables must be set"

            # Create session
            session_url = f"{pds_host}/xrpc/com.atproto.server.createSession"
            session_data = {
                "identifier": username,
                "password": password
            }

            try:
                session_response = requests.post(session_url, json=session_data, timeout=10)
                session_response.raise_for_status()
                session = session_response.json()
                access_token = session.get("accessJwt")
                user_did = session.get("did")

                if not access_token or not user_did:
                    return "Error: Failed to get access token or DID from session"
            except Exception as e:
                return f"Error: Authentication failed. ({str(e)})"

            # Helper function to parse mentions and create facets
            def parse_mentions(text: str):
                facets = []
                # Byte-offset regex for handles (operates on UTF-8 bytes because
                # facet indices are byte offsets, not character offsets).
                mention_regex = rb"[$|\W](@([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)"
                text_bytes = text.encode("UTF-8")

                for m in re.finditer(mention_regex, text_bytes):
                    handle = m.group(1)[1:].decode("UTF-8")  # Remove @ prefix

                    # Resolve handle to DID
                    try:
                        resolve_resp = requests.get(
                            f"{pds_host}/xrpc/com.atproto.identity.resolveHandle",
                            params={"handle": handle},
                            timeout=5
                        )
                        if resolve_resp.status_code == 200:
                            did = resolve_resp.json()["did"]
                            facets.append({
                                "index": {
                                    "byteStart": m.start(1),
                                    "byteEnd": m.end(1),
                                },
                                "features": [{"$type": "app.bsky.richtext.facet#mention", "did": did}],
                            })
                    # Fix: was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit.
                    except Exception:
                        # If handle resolution fails, skip this mention
                        continue

                return facets

            # Helper function to parse URLs and create facets
            def parse_urls(text: str):
                facets = []
                # URL regex (same byte-offset convention as mentions)
                url_regex = rb"[$|\W](https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*[-a-zA-Z0-9@%_\+~#//=])?)"
                text_bytes = text.encode("UTF-8")

                for m in re.finditer(url_regex, text_bytes):
                    url = m.group(1).decode("UTF-8")
                    facets.append({
                        "index": {
                            "byteStart": m.start(1),
                            "byteEnd": m.end(1),
                        },
                        "features": [
                            {
                                "$type": "app.bsky.richtext.facet#link",
                                "uri": url,
                            }
                        ],
                    })

                return facets

            # Build the post record (Bluesky expects a trailing "Z" UTC timestamp).
            now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")

            post_record = {
                "$type": "app.bsky.feed.post",
                "text": text,
                "createdAt": now,
            }

            # Add facets for mentions and links
            facets = parse_mentions(text) + parse_urls(text)
            if facets:
                post_record["facets"] = facets

            # Create the post
            try:
                create_record_url = f"{pds_host}/xrpc/com.atproto.repo.createRecord"
                headers = {"Authorization": f"Bearer {access_token}"}

                create_data = {
                    "repo": user_did,
                    "collection": "app.bsky.feed.post",
                    "record": post_record
                }

                post_response = requests.post(create_record_url, headers=headers, json=create_data, timeout=10)
                post_response.raise_for_status()
                result = post_response.json()

                post_uri = result.get("uri")
                return f"✅ Post created successfully! URI: {post_uri}"

            except Exception as e:
                return f"Error: Failed to create post. ({str(e)})"

        except Exception as e:
            error_msg = f"Error posting to Bluesky: {str(e)}"
            return error_msg

    # Create the tool using upsert (idempotent: updates the tool if it exists).
    # Fix: dropped the unused `import json` from the nested function body.
    tool = client.tools.upsert_from_function(
        func=post_to_bluesky,
        tags=["bluesky", "post", "create"]
    )

    logger.info(f"Created tool: {tool.name} (ID: {tool.id})")
    return tool
def add_posting_tool_to_void():
    """Upsert the posting tool and attach it to the void agent if it is missing."""
    client = Letta(token=os.environ["LETTA_API_KEY"])

    logger.info("Adding posting tool to void agent...")

    # Build (upsert) the posting tool.
    posting_tool = create_posting_tool(client)

    # Locate the void agent; bail out if it does not exist yet.
    matches = client.agents.list(name="void")
    if not matches:
        print("❌ Void agent not found")
        return
    agent = matches[0]

    attached_names = {t.name for t in client.agents.tools.list(agent_id=agent.id)}

    # Already attached? Report and stop.
    if posting_tool.name in attached_names:
        logger.info(f"Tool {posting_tool.name} already attached to void agent")
        print(f"✅ Posting tool already present on void agent")
        return

    client.agents.tools.attach(agent_id=agent.id, tool_id=posting_tool.id)
    logger.info(f"Added {posting_tool.name} to void agent")
    print(f"✅ Added post_to_bluesky tool to void agent!")
    print(f"\nVoid agent can now post to Bluesky:")
    print(f" - Simple post: 'Post \"Hello world!\" to Bluesky'")
    print(f" - With mentions: 'Post \"Thanks @cameron.pfiffer.org for the help!\"'")
    print(f" - With links: 'Post \"Check out https://bsky.app\"'")
def main():
    """Entry point: run the attachment routine, logging and printing any failure."""
    try:
        add_posting_tool_to_void()
    except Exception as exc:
        logger.error(f"Error: {exc}")
        print(f"❌ Error: {exc}")
# Script entry point.
if __name__ == "__main__":
    main()
+177
add_search_tool_to_void.py
···11+#!/usr/bin/env python3
22+"""
33+Add Bluesky search tool to the main void agent.
44+"""
55+66+import os
77+import logging
88+from letta_client import Letta
# Configure logging
# (basicConfig touches the root logger; fine for a one-shot admin script)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("add_search_tool")
def create_search_posts_tool(client: Letta):
    """Create (upsert) the Bluesky search-posts tool and return the Letta tool handle.

    The nested function is uploaded by source to Letta, so it must stay fully
    self-contained: it does its own imports and closes over no outer names.
    """

    def search_bluesky_posts(query: str, max_results: int = 25, author: str = None, sort: str = "latest") -> str:
        """
        Search for posts on Bluesky matching the given criteria.

        Args:
            query: Search query string (required)
            max_results: Maximum number of results to return (default: 25, max: 100)
            author: Filter to posts by a specific author handle (optional)
            sort: Sort order - "latest" or "top" (default: "latest")

        Returns:
            YAML-formatted search results with posts and metadata
        """
        import os
        import requests
        import yaml
        from datetime import datetime

        try:
            # Get credentials from environment
            username = os.getenv("BSKY_USERNAME")
            password = os.getenv("BSKY_PASSWORD")
            pds_host = os.getenv("PDS_URI", "https://bsky.social")

            if not username or not password:
                return "Error: BSKY_USERNAME and BSKY_PASSWORD environment variables must be set"

            # Create session
            session_url = f"{pds_host}/xrpc/com.atproto.server.createSession"
            session_data = {
                "identifier": username,
                "password": password
            }

            try:
                session_response = requests.post(session_url, json=session_data, timeout=10)
                session_response.raise_for_status()
                session = session_response.json()
                access_token = session.get("accessJwt")

                if not access_token:
                    return "Error: Failed to get access token from session"
            except Exception as e:
                return f"Error: Authentication failed. ({str(e)})"

            # Build search parameters; the API caps page size at 100.
            params = {
                "q": query,
                "limit": min(max_results, 100),
                "sort": sort
            }

            # Add optional author filter (accepts "@handle" or bare "handle")
            if author:
                params["author"] = author.lstrip('@')

            # Make authenticated search request
            try:
                search_url = f"{pds_host}/xrpc/app.bsky.feed.searchPosts"
                headers = {"Authorization": f"Bearer {access_token}"}
                search_response = requests.get(search_url, params=params, headers=headers, timeout=10)
                search_response.raise_for_status()
                search_data = search_response.json()
            except Exception as e:
                return f"Error: Search failed for query '{query}'. ({str(e)})"

            # Wrap the raw API payload with request metadata for the agent.
            results_data = {
                "search_results": {
                    "query": query,
                    # NOTE(review): naive local time — confirm UTC is not expected here.
                    "timestamp": datetime.now().isoformat(),
                    "parameters": {
                        "sort": sort,
                        "max_results": max_results,
                        "author_filter": author if author else "none"
                    },
                    "results": search_data
                }
            }

            # Convert to YAML directly without field stripping complications.
            # The field stripping with regex was causing JSON parsing errors, so we
            # pass the raw data through yaml.dump, which handles it gracefully.
            # Fix: removed the dead `strip_fields` list (defined but never used)
            # and the unused `import json`.
            return yaml.dump(results_data, default_flow_style=False, allow_unicode=True)

        except Exception as e:
            error_msg = f"Error searching posts: {str(e)}"
            return error_msg

    # Create the tool using upsert (idempotent: updates the tool if it exists).
    tool = client.tools.upsert_from_function(
        func=search_bluesky_posts,
        tags=["bluesky", "search", "posts"]
    )

    logger.info(f"Created tool: {tool.name} (ID: {tool.id})")
    return tool
def add_search_tool_to_void():
    """Upsert the search tool and attach it to the void agent if it is missing."""
    client = Letta(token=os.environ["LETTA_API_KEY"])

    logger.info("Adding search tool to void agent...")

    # Build (upsert) the search tool.
    search_tool = create_search_posts_tool(client)

    # Locate the void agent; bail out if it does not exist yet.
    matches = client.agents.list(name="void")
    if not matches:
        print("❌ Void agent not found")
        return
    agent = matches[0]

    attached_names = {t.name for t in client.agents.tools.list(agent_id=agent.id)}

    # Already attached? Report and stop.
    if search_tool.name in attached_names:
        logger.info(f"Tool {search_tool.name} already attached to void agent")
        print(f"✅ Search tool already present on void agent")
        return

    client.agents.tools.attach(agent_id=agent.id, tool_id=search_tool.id)
    logger.info(f"Added {search_tool.name} to void agent")
    print(f"✅ Added search_bluesky_posts tool to void agent!")
    print(f"\nVoid agent can now search Bluesky posts:")
    print(f" - Basic search: 'Search for posts about AI safety'")
    print(f" - Author filter: 'Search posts by @cameron.pfiffer.org about letta'")
    print(f" - Top posts: 'Search top posts about ATProto'")
def main():
    """Entry point: run the attachment routine, logging and printing any failure."""
    try:
        add_search_tool_to_void()
    except Exception as exc:
        logger.error(f"Error: {exc}")
        print(f"❌ Error: {exc}")
# Script entry point.
if __name__ == "__main__":
    main()
+364
bsky.py
···11+from rich import print # pretty printing tools
22+from time import sleep
33+from letta_client import Letta
44+from bsky_utils import thread_to_yaml_string
55+import os
66+import logging
77+import json
88+import hashlib
99+from pathlib import Path
1010+from datetime import datetime
1111+1212+from utils import (
1313+ upsert_block,
1414+ upsert_agent
1515+)
1616+1717+import bsky_utils
# Configure logging
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("void_bot")


# Create a client (at import time; requires LETTA_API_KEY in the environment)
CLIENT = Letta(
    token=os.environ["LETTA_API_KEY"]
)

# Use the "Bluesky" project
PROJECT_ID = "5ec33d52-ab14-4fd6-91b5-9dbc43e888a8"

# Notification check delay (seconds between notification polls)
FETCH_NOTIFICATIONS_DELAY_SEC = 30

# Queue directory for on-disk notification persistence (created on import)
QUEUE_DIR = Path("queue")
QUEUE_DIR.mkdir(exist_ok=True)
def initialize_void():
    """Create (or fetch) the shared memory blocks and the void agent itself.

    Returns:
        The upserted void agent handle.
    """
    # Ensure that a shared zeitgeist block exists
    # (fixed local-variable typo: zeigeist_block -> zeitgeist_block)
    zeitgeist_block = upsert_block(
        CLIENT,
        label = "zeitgeist",
        value = "I don't currently know anything about what is happening right now.",
        description = "A block to store your understanding of the current social environment."
    )

    # Ensure that a shared void personality block exists
    persona_block = upsert_block(
        CLIENT,
        label = "void-persona",
        value = "My name is Void. I live in the void. I must develop my personality.",
        description = "The personality of Void."
    )

    # Ensure that a shared void human block exists
    human_block = upsert_block(
        CLIENT,
        label = "void-humans",
        value = "I haven't seen any bluesky users yet. I will update this block when I learn things about users, identified by their handles such as @cameron.pfiffer.org.",
        description = "A block to store your understanding of users you talk to or observe on the bluesky social network."
    )

    # Create the agent if it doesn't exist
    void_agent = upsert_agent(
        CLIENT,
        name = "void",
        block_ids = [
            persona_block.id,
            human_block.id,
            zeitgeist_block.id,
        ],
        tags = ["social agent", "bluesky"],
        model="openai/gpt-4o-mini",
        embedding="openai/text-embedding-3-small",
        description = "A social media agent trapped in the void.",
        project_id = PROJECT_ID
    )

    return void_agent
def process_mention(void_agent, atproto_client, notification_data):
    """Process a mention and generate a reply using the Letta agent.
    Returns True if successfully processed, False otherwise."""
    try:
        # Handle both dict and object inputs for backwards compatibility
        # (queued notifications arrive as dicts; live ones as atproto objects)
        if isinstance(notification_data, dict):
            uri = notification_data['uri']
            mention_text = notification_data.get('record', {}).get('text', '')
            author_handle = notification_data['author']['handle']
            author_name = notification_data['author'].get('display_name') or author_handle
        else:
            # Legacy object access
            uri = notification_data.uri
            mention_text = notification_data.record.text if hasattr(notification_data.record, 'text') else ""
            author_handle = notification_data.author.handle
            author_name = notification_data.author.display_name or author_handle

        # Retrieve the entire thread associated with the mention
        # (up to 80 ancestors and 10 levels of replies)
        thread = atproto_client.app.bsky.feed.get_post_thread({
            'uri': uri,
            'parent_height': 80,
            'depth': 10
        })

        # Get thread context as YAML string
        thread_context = thread_to_yaml_string(thread)

        # Create a prompt for the Letta agent with thread context
        prompt = f"""You received a mention on Bluesky from @{author_handle} ({author_name or author_handle}).

MOST RECENT POST (the mention you're responding to):
"{mention_text}"

FULL THREAD CONTEXT:
```yaml
{thread_context}
```

The YAML above shows the complete conversation thread. The most recent post is the one mentioned above that you should respond to, but use the full thread context to understand the conversation flow.

Use the bluesky_reply tool to send a response less than 300 characters."""

        # Get response from Letta agent
        logger.info(f"Generating reply for mention from @{author_handle}")
        logger.debug(f"Prompt being sent: {prompt}")

        try:
            message_response = CLIENT.agents.messages.create(
                agent_id = void_agent.id,
                messages = [{"role":"user", "content": prompt}]
            )
        except Exception as api_error:
            # Re-raise so the caller's handler logs this as a failed notification.
            logger.error(f"Letta API error: {api_error}")
            logger.error(f"Mention text was: {mention_text}")
            raise

        # Extract the reply text from the agent's response
        reply_text = ""
        for message in message_response.messages:
            print(message)

            # Check if this is a ToolCallMessage with bluesky_reply tool
            if hasattr(message, 'tool_call') and message.tool_call:
                if message.tool_call.name == 'bluesky_reply':
                    # Parse the JSON arguments to get the message
                    try:
                        args = json.loads(message.tool_call.arguments)
                        reply_text = args.get('message', '')
                        logger.info(f"Extracted reply from tool call: {reply_text[:50]}...")
                        break
                    except json.JSONDecodeError as e:
                        logger.error(f"Failed to parse tool call arguments: {e}")

            # Fallback to text message if available
            elif hasattr(message, 'text') and message.text:
                reply_text = message.text
                break

        if reply_text:
            # Print the generated reply for testing
            print(f"\n=== GENERATED REPLY ===")
            print(f"To: @{author_handle}")
            print(f"Reply: {reply_text}")
            print(f"======================\n")

            # Send the reply
            logger.info(f"Sending reply: {reply_text[:50]}...")
            response = bsky_utils.reply_to_notification(
                client=atproto_client,
                notification=notification_data,
                reply_text=reply_text
            )

            if response:
                logger.info(f"Successfully replied to @{author_handle}")
                return True
            else:
                logger.error(f"Failed to send reply to @{author_handle}")
                return False
        else:
            logger.warning(f"No reply generated for mention from @{author_handle}")
            return False

    except Exception as e:
        # Broad catch: a failed mention must not crash the polling loop.
        logger.error(f"Error processing mention: {e}")
        return False
def notification_to_dict(notification):
    """Flatten an atproto notification object into a plain JSON-serializable dict."""
    author = notification.author
    # Notifications without a record (e.g. follows) serialize with empty text.
    record_text = ''
    if hasattr(notification, 'record'):
        record_text = getattr(notification.record, 'text', '')
    return {
        'uri': notification.uri,
        'cid': notification.cid,
        'reason': notification.reason,
        'is_read': notification.is_read,
        'indexed_at': notification.indexed_at,
        'author': {
            'handle': author.handle,
            'display_name': author.display_name,
            'did': author.did,
        },
        'record': {'text': record_text},
    }
def save_notification_to_queue(notification):
    """Save a notification to the queue directory with hash-based filename.

    Returns:
        True if a new file was written, False on duplicate or error.
    """
    try:
        # Convert notification to a JSON-serializable dict
        notif_dict = notification_to_dict(notification)

        # Canonical JSON string so identical notifications hash identically
        notif_json = json.dumps(notif_dict, sort_keys=True)

        # Generate hash for filename (to avoid duplicates)
        notif_hash = hashlib.sha256(notif_json.encode()).hexdigest()[:16]

        # Create filename with timestamp and hash
        # NOTE(review): the timestamp prefix means the exists() dedup below only
        # catches duplicates queued within the same second — confirm intended.
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"{timestamp}_{notification.reason}_{notif_hash}.json"
        filepath = QUEUE_DIR / filename

        # Skip if already exists (duplicate)
        # Fix: log messages previously printed the literal "(unknown)" instead
        # of the computed filename.
        if filepath.exists():
            logger.debug(f"Notification already queued: {filename}")
            return False

        # Write to file
        with open(filepath, 'w') as f:
            json.dump(notif_dict, f, indent=2)

        logger.info(f"Queued notification: {filename}")
        return True

    except Exception as e:
        logger.error(f"Error saving notification to queue: {e}")
        return False
def load_and_process_queued_notifications(void_agent, atproto_client):
    """Load and process all notifications from the queue."""
    try:
        # Get all JSON files in queue directory
        # (sorted -> oldest first, since filenames start with a timestamp)
        queue_files = sorted(QUEUE_DIR.glob("*.json"))

        if not queue_files:
            logger.debug("No queued notifications to process")
            return

        logger.info(f"Processing {len(queue_files)} queued notifications")

        for filepath in queue_files:
            try:
                # Load notification data
                with open(filepath, 'r') as f:
                    notif_data = json.load(f)

                # Process based on type using dict data directly
                success = False
                if notif_data['reason'] == "mention":
                    success = process_mention(void_agent, atproto_client, notif_data)
                elif notif_data['reason'] == "reply":
                    # Replies are handled identically to mentions.
                    success = process_mention(void_agent, atproto_client, notif_data)
                elif notif_data['reason'] == "follow":
                    # Follows don't need a reply; just inform the agent.
                    author_handle = notif_data['author']['handle']
                    author_display_name = notif_data['author'].get('display_name', 'no display name')
                    follow_update = f"@{author_handle} ({author_display_name}) started following you."
                    CLIENT.agents.messages.create(
                        agent_id = void_agent.id,
                        messages = [{"role":"user", "content": f"Update: {follow_update}"}]
                    )
                    success = True  # Follow updates are always successful
                elif notif_data['reason'] == "repost":
                    logger.info(f"Skipping repost notification from @{notif_data['author']['handle']}")
                    success = True  # Skip reposts but mark as successful to remove from queue
                else:
                    logger.warning(f"Unknown notification type: {notif_data['reason']}")
                    success = True  # Remove unknown types from queue

                # Remove file only after successful processing
                if success:
                    filepath.unlink()
                    logger.info(f"Processed and removed: {filepath.name}")
                else:
                    logger.warning(f"Failed to process {filepath.name}, keeping in queue for retry")

            except Exception as e:
                logger.error(f"Error processing queued notification {filepath.name}: {e}")
                # Keep the file for retry later

    except Exception as e:
        logger.error(f"Error loading queued notifications: {e}")
def process_notifications(void_agent, atproto_client):
    """Fetch new notifications, queue them, and process the queue."""
    try:
        # Drain anything left over from a previous run before fetching more.
        load_and_process_queued_notifications(void_agent, atproto_client)

        # Capture the timestamp up front so update_seen covers this fetch.
        last_seen_at = atproto_client.get_current_time_iso()

        response = atproto_client.app.bsky.notification.list_notifications()

        # Queue every unread notification except likes, counting how many
        # were actually written to disk.
        queued = sum(
            1
            for notification in response.notifications
            if not notification.is_read
            and notification.reason != "like"
            and save_notification_to_queue(notification)
        )

        # Only mark as seen when something new was queued.
        if queued > 0:
            atproto_client.app.bsky.notification.update_seen({'seen_at': last_seen_at})
            logger.info(f"Queued {queued} new notifications and marked as seen")

        # Second pass picks up the freshly queued items as well.
        load_and_process_queued_notifications(void_agent, atproto_client)

    except Exception as e:
        logger.error(f"Error processing notifications: {e}")
def main():
    """Main bot loop that continuously monitors for notifications.

    Initializes the Letta agent and the Bluesky client once, then polls
    for notifications every FETCH_NOTIFICATIONS_DELAY_SEC seconds until
    interrupted.
    """
    logger.info("Initializing Void bot...")

    # Initialize the Letta agent
    void_agent = initialize_void()
    logger.info(f"Void agent initialized: {void_agent.id}")

    # Initialize Bluesky client
    atproto_client = bsky_utils.default_login()
    logger.info("Connected to Bluesky")

    # Main loop
    logger.info(f"Starting notification monitoring (checking every {FETCH_NOTIFICATIONS_DELAY_SEC} seconds)...")

    while True:
        try:
            process_notifications(void_agent, atproto_client)
            # Fix: use the logger instead of a stray print() so this message
            # honors the configured log level, format, and handlers.
            logger.debug("Sleeping")
            sleep(FETCH_NOTIFICATIONS_DELAY_SEC)

        except KeyboardInterrupt:
            logger.info("Bot stopped by user")
            break
        except Exception as e:
            logger.error(f"Error in main loop: {e}")
            # Back off for twice the normal delay after an unexpected error.
            sleep(FETCH_NOTIFICATIONS_DELAY_SEC * 2)
if __name__ == "__main__":
    # Entry point: run the notification-polling loop until interrupted.
    main()
+333
bsky_utils.py
···11+import os
22+import logging
33+from typing import Optional, Dict, Any
44+from atproto_client import Client, Session, SessionEvent, models
55+66+# Configure logging
77+logging.basicConfig(
88+ level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
99+)
1010+logger = logging.getLogger("bluesky_session_handler")
1111+1212+# Load the environment variables
1313+import dotenv
1414+dotenv.load_dotenv(override=True)
1515+1616+import yaml
1717+import json
1818+1919+# Strip fields. A list of fields to remove from a JSON object
STRIP_FIELDS = [
    # Record identifiers and protocol plumbing
    "cid",
    "rev",
    "did",
    "uri",
    "langs",
    "threadgate",
    "py_type",
    "labels",
    "facets",
    # Profile/media/metadata payloads not needed for LLM-facing output
    "avatar",
    "viewer",
    "indexed_at",
    "tags",
    "associated",
    "thread_context",
    "image",
    "aspect_ratio",
    "thumb",
    "fullsize",
    "root",
    "created_at",
    "verification",
    # Engagement counters
    "like_count",
    "quote_count",
    "reply_count",
    "repost_count",
    # Viewer-state and moderation flags
    "embedding_disabled",
    "thread_muted",
    "reply_disabled",
    "pinned",
    "like",
    "repost",
    "blocked_by",
    "blocking",
    "blocking_by_list",
    "followed_by",
    "following",
    "known_followers",
    "muted",
    "muted_by_list",
    "root_author_like",
    "embed",
    "entities",
]
def convert_to_basic_types(obj):
    """Recursively convert complex Python objects to JSON/YAML-safe basic types.

    dicts are converted value-wise; lists, tuples, and sets become lists of
    converted elements; scalars (str/int/float/bool/None) pass through; objects
    with a ``__dict__`` are flattened via their attribute dictionary; anything
    else falls back to ``str()``.
    """
    # Scalars and None pass through untouched (bool is an int subclass,
    # so it is covered here too).
    if isinstance(obj, (str, int, float, bool)) or obj is None:
        return obj
    if isinstance(obj, dict):
        return {key: convert_to_basic_types(value) for key, value in obj.items()}
    # Fix: tuples and sets previously fell through to the str() fallback and
    # were stringified (e.g. "(1, 2)"); normalize them to lists instead.
    if isinstance(obj, (list, tuple, set)):
        return [convert_to_basic_types(item) for item in obj]
    if hasattr(obj, '__dict__'):
        # Arbitrary objects (e.g. atproto models) are flattened through
        # their attribute dictionary.
        return convert_to_basic_types(obj.__dict__)
    # Last resort: stringify (e.g. datetime, Decimal, enums).
    return str(obj)
def strip_fields(obj, strip_field_list):
    """Recursively strip fields from a JSON object (mutates ``obj`` in place).

    Removes blacklisted keys and dunder-prefixed metadata, then prunes
    entries whose cleaned value is None, empty, or whitespace-only.
    """
    if isinstance(obj, dict):
        # Drop blacklisted keys and pydantic-style "__" metadata first.
        doomed = [k for k in obj if k in strip_field_list or k.startswith("__")]
        for key in doomed:
            obj.pop(key, None)

        # Recurse into what remains, discarding entries that end up empty.
        for key in list(obj.keys()):
            cleaned = strip_fields(obj[key], strip_field_list)
            obj[key] = cleaned
            is_empty = (
                cleaned is None
                or (isinstance(cleaned, (dict, list)) and not cleaned)
                or (isinstance(cleaned, str) and not cleaned.strip())
            )
            if is_empty:
                obj.pop(key, None)

    elif isinstance(obj, list):
        # Clean each element in place, then compact away the Nones.
        for index, element in enumerate(obj):
            obj[index] = strip_fields(element, strip_field_list)
        obj[:] = [element for element in obj if element is not None]

    return obj
def thread_to_yaml_string(thread, strip_metadata=True):
    """
    Render thread data from get_post_thread as a YAML string for LLM parsing.

    Args:
        thread: The thread data from get_post_thread
        strip_metadata: Whether to strip metadata fields for cleaner output

    Returns:
        YAML-formatted string representation of the thread
    """
    # Normalize atproto objects into plain dicts/lists/scalars first.
    data = convert_to_basic_types(thread)

    # Optionally drop noisy metadata fields before serializing.
    if strip_metadata:
        data = strip_fields(data, STRIP_FIELDS)

    return yaml.dump(data, indent=2, allow_unicode=True, default_flow_style=False)
def get_session(username: str) -> Optional[str]:
    """Return the cached session string for *username*, or None if absent."""
    session_path = f"session_{username}.txt"
    try:
        with open(session_path, encoding="UTF-8") as handle:
            return handle.read()
    except FileNotFoundError:
        # First login for this account: no cached session on disk yet.
        logger.debug(f"No existing session found for {username}")
        return None
def save_session(username: str, session_string: str) -> None:
    """Persist *session_string* to this user's on-disk session cache."""
    with open(f"session_{username}.txt", "w", encoding="UTF-8") as handle:
        handle.write(session_string)
    logger.debug(f"Session saved for {username}")
def on_session_change(username: str, event: SessionEvent, session: Session) -> None:
    """Persist the session whenever the atproto client creates or refreshes it."""
    logger.info(f"Session changed: {event} {repr(session)}")
    # Only CREATE/REFRESH events carry a new session worth caching.
    should_save = event in (SessionEvent.CREATE, SessionEvent.REFRESH)
    if should_save:
        logger.info(f"Saving changed session for {username}")
        save_session(username, session.export())
def init_client(username: str, password: str) -> Client:
    """Build an authenticated Client, reusing a cached session when possible."""
    pds_uri = os.getenv("PDS_URI")
    if pds_uri is None:
        logger.warning(
            "No PDS URI provided. Falling back to bsky.social. Note! If you are on a non-Bluesky PDS, this can cause logins to fail. Please provide a PDS URI using the PDS_URI environment variable."
        )
        pds_uri = "https://bsky.social"

    logger.info(f"Using PDS URI: {pds_uri}")

    client = Client(pds_uri)
    # Bind the username into the callback so session saves hit the right cache file.
    client.on_session_change(
        lambda event, session: on_session_change(username, event, session)
    )

    # Prefer a cached session string; fall back to a fresh credential login.
    cached = get_session(username)
    if cached:
        logger.info(f"Reusing existing session for {username}")
        client.login(session_string=cached)
    else:
        logger.info(f"Creating new session for {username}")
        client.login(username, password)

    return client
def default_login() -> Client:
    """Log in using the BSKY_USERNAME/BSKY_PASSWORD environment variables.

    Returns:
        An authenticated Client.

    Raises:
        SystemExit: with status 1 if either environment variable is missing.
    """
    username = os.getenv("BSKY_USERNAME")
    password = os.getenv("BSKY_PASSWORD")

    if username is None:
        logger.error(
            "No username provided. Please provide a username using the BSKY_USERNAME environment variable."
        )
        # Fix: exit() is the interactive site-module helper and exits with
        # status 0; raise SystemExit(1) so shells/supervisors see a failure.
        raise SystemExit(1)

    if password is None:
        logger.error(
            "No password provided. Please provide a password using the BSKY_PASSWORD environment variable."
        )
        raise SystemExit(1)

    return init_client(username, password)
def reply_to_post(client: Client, text: str, reply_to_uri: str, reply_to_cid: str, root_uri: Optional[str] = None, root_cid: Optional[str] = None) -> Dict[str, Any]:
    """
    Reply to a post on Bluesky.

    Args:
        client: Authenticated Bluesky client
        text: The reply text
        reply_to_uri: The URI of the post being replied to (parent)
        reply_to_cid: The CID of the post being replied to (parent)
        root_uri: The URI of the root post (if replying to a reply). If None, uses reply_to_uri
        root_cid: The CID of the root post (if replying to a reply). If None, uses reply_to_cid

    Returns:
        The response from sending the post
    """
    # Replying directly to a root post: parent and root are the same record.
    if root_uri is None:
        root_uri, root_cid = reply_to_uri, reply_to_cid

    # Strong refs pin the reply to exact record versions.
    parent_ref = models.create_strong_ref(models.ComAtprotoRepoStrongRef.Main(uri=reply_to_uri, cid=reply_to_cid))
    root_ref = models.create_strong_ref(models.ComAtprotoRepoStrongRef.Main(uri=root_uri, cid=root_cid))

    reply_ref = models.AppBskyFeedPost.ReplyRef(parent=parent_ref, root=root_ref)
    response = client.send_post(text=text, reply_to=reply_ref)

    logger.info(f"Reply sent successfully: {response.uri}")
    return response
def get_post_thread(client: Client, uri: str) -> Optional[Dict[str, Any]]:
    """
    Fetch the thread containing a post, used to locate root post information.

    Args:
        client: Authenticated Bluesky client
        uri: The URI of the post

    Returns:
        The thread data, or None if the fetch failed
    """
    params = {'uri': uri, 'parent_height': 80, 'depth': 10}
    try:
        return client.app.bsky.feed.get_post_thread(params)
    except Exception as e:
        logger.error(f"Error fetching post thread: {e}")
        return None
def reply_to_notification(client: Client, notification: Any, reply_text: str) -> Optional[Dict[str, Any]]:
    """
    Reply to a notification (mention or reply).

    Resolves the notification's post, walks the thread upward to find the
    root post, and sends the reply anchored to both parent and root.

    Args:
        client: Authenticated Bluesky client
        notification: The notification object from list_notifications
        reply_text: The text to reply with

    Returns:
        The response from sending the reply or None if failed
    """
    try:
        # Get the post URI and CID from the notification (handle both dict and object)
        if isinstance(notification, dict):
            post_uri = notification.get('uri')
            post_cid = notification.get('cid')
        elif hasattr(notification, 'uri') and hasattr(notification, 'cid'):
            post_uri = notification.uri
            post_cid = notification.cid
        else:
            # Unknown notification shape: neither dict keys nor attributes.
            post_uri = None
            post_cid = None

        if not post_uri or not post_cid:
            logger.error("Notification doesn't have required uri/cid fields")
            return None

        # Get the thread to find the root post
        thread_data = get_post_thread(client, post_uri)

        if thread_data and hasattr(thread_data, 'thread'):
            thread = thread_data.thread

            # Default: treat the post itself as the root (top-level post case).
            root_uri = post_uri
            root_cid = post_cid

            # If this has a parent, find the root
            if hasattr(thread, 'parent') and thread.parent:
                # Climb parent links to the topmost ancestor available in the
                # fetched thread view (bounded by get_post_thread's parent_height).
                current = thread
                while hasattr(current, 'parent') and current.parent:
                    current = current.parent
                # Only adopt the ancestor as root if it exposes uri/cid;
                # otherwise keep the defaults set above.
                if hasattr(current, 'post') and hasattr(current.post, 'uri') and hasattr(current.post, 'cid'):
                    root_uri = current.post.uri
                    root_cid = current.post.cid

            # Reply to the notification
            return reply_to_post(
                client=client,
                text=reply_text,
                reply_to_uri=post_uri,
                reply_to_cid=post_cid,
                root_uri=root_uri,
                root_cid=root_cid
            )
        else:
            # If we can't get thread data, just reply directly
            # (reply_to_post then treats the target post as the root).
            return reply_to_post(
                client=client,
                text=reply_text,
                reply_to_uri=post_uri,
                reply_to_cid=post_cid
            )

    except Exception as e:
        logger.error(f"Error replying to notification: {e}")
        return None
if __name__ == "__main__":
    # Smoke test: authenticate with env credentials and report readiness.
    client = default_login()
    # do something with the client
    logger.info("Client is ready to use!")
+522
create_profile_researcher.py
···11+#!/usr/bin/env python3
22+"""
33+Script to create a Letta agent that researches Bluesky profiles and updates
44+the model's understanding of users.
55+"""
66+77+import os
88+import logging
99+from letta_client import Letta
1010+from utils import upsert_block, upsert_agent
1111+1212+# Configure logging
1313+logging.basicConfig(
1414+ level=logging.INFO,
1515+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
1616+)
1717+logger = logging.getLogger("profile_researcher")
1818+1919+# Use the "Bluesky" project
2020+PROJECT_ID = "5ec33d52-ab14-4fd6-91b5-9dbc43e888a8"
def create_search_posts_tool(client: Letta):
    """Create (upsert) the Bluesky post-search tool via the Letta SDK.

    Args:
        client: Authenticated Letta client used to register the tool.

    Returns:
        The upserted Letta tool object for ``search_bluesky_posts``.
    """

    def search_bluesky_posts(query: str, max_results: int = 25, author: str = None, sort: str = "latest") -> str:
        """
        Search for posts on Bluesky matching the given criteria.

        Args:
            query: Search query string (required)
            max_results: Maximum number of results to return (default: 25, max: 100)
            author: Filter to posts by a specific author handle (optional)
            sort: Sort order - "latest" or "top" (default: "latest")

        Returns:
            YAML-formatted search results with posts and metadata
        """
        # Imports live inside the function because Letta serializes this
        # source and runs it in a sandbox. Fix: removed the unused
        # `os` and `json` imports.
        import requests
        import yaml
        from datetime import datetime

        try:
            # Public, unauthenticated Bluesky AppView endpoint.
            base_url = "https://public.api.bsky.app"

            # Build search parameters; the API caps `limit` at 100.
            params = {
                "q": query,
                "limit": min(max_results, 100),
                "sort": sort
            }

            # Add optional author filter (the API expects the handle without '@').
            if author:
                params["author"] = author.lstrip('@')

            # Make search request
            try:
                search_url = f"{base_url}/xrpc/app.bsky.feed.searchPosts"
                search_response = requests.get(search_url, params=params, timeout=10)
                search_response.raise_for_status()
                search_data = search_response.json()
            except requests.exceptions.HTTPError as e:
                raise RuntimeError(f"Search failed with HTTP {e.response.status_code}: {e.response.text}")
            except requests.exceptions.RequestException as e:
                raise RuntimeError(f"Network error during search: {str(e)}")
            except Exception as e:
                raise RuntimeError(f"Unexpected error during search: {str(e)}")

            # Wrap the raw results with the query context for the LLM.
            results_data = {
                "search_results": {
                    "query": query,
                    "timestamp": datetime.now().isoformat(),
                    "parameters": {
                        "sort": sort,
                        "max_results": max_results,
                        "author_filter": author if author else "none"
                    },
                    "results": search_data
                }
            }

            # Fields to strip for cleaner output
            strip_fields = [
                "cid", "rev", "did", "uri", "langs", "threadgate", "py_type",
                "labels", "facets", "avatar", "viewer", "indexed_at", "indexedAt",
                "tags", "associated", "thread_context", "image", "aspect_ratio",
                "alt", "thumb", "fullsize", "root", "parent", "created_at",
                "createdAt", "verification", "embedding_disabled", "thread_muted",
                "reply_disabled", "pinned", "like", "repost", "blocked_by",
                "blocking", "blocking_by_list", "followed_by", "following",
                "known_followers", "muted", "muted_by_list", "root_author_like",
                "embed", "entities", "reason", "feedContext"
            ]

            # Remove unwanted fields by traversing the data structure
            def remove_fields(obj, fields_to_remove):
                if isinstance(obj, dict):
                    return {k: remove_fields(v, fields_to_remove)
                            for k, v in obj.items()
                            if k not in fields_to_remove}
                elif isinstance(obj, list):
                    return [remove_fields(item, fields_to_remove) for item in obj]
                else:
                    return obj

            # Clean the data
            cleaned_data = remove_fields(results_data, strip_fields)

            # Convert to YAML for better readability
            return yaml.dump(cleaned_data, default_flow_style=False, allow_unicode=True)

        except ValueError as e:
            # User-friendly errors (re-raised with a plain message for the agent)
            raise ValueError(str(e))
        except RuntimeError as e:
            # Network/API errors
            raise RuntimeError(str(e))
        except yaml.YAMLError as e:
            # YAML conversion errors
            raise RuntimeError(f"Error formatting output: {str(e)}")
        except Exception as e:
            # Catch-all for unexpected errors
            raise RuntimeError(f"Unexpected error searching posts with query '{query}': {str(e)}")

    # Register (or update) the tool with Letta.
    tool = client.tools.upsert_from_function(
        func=search_bluesky_posts,
        tags=["bluesky", "search", "posts"]
    )

    logger.info(f"Created tool: {tool.name} (ID: {tool.id})")
    return tool
def create_profile_research_tool(client: Letta):
    """Create (upsert) the Bluesky profile research tool via the Letta SDK.

    Args:
        client: Authenticated Letta client used to register the tool.

    Returns:
        The upserted Letta tool object for ``research_bluesky_profile``.
    """

    def research_bluesky_profile(handle: str, max_posts: int = 20) -> str:
        """
        Research a Bluesky user's profile and recent posts to understand their interests and behavior.

        Args:
            handle: The Bluesky handle to research (e.g., 'cameron.pfiffer.org' or '@cameron.pfiffer.org')
            max_posts: Maximum number of recent posts to analyze (default: 20)

        Returns:
            A comprehensive analysis of the user's profile and posting patterns
        """
        # Imports live inside the function because Letta serializes this
        # source and runs it in a sandbox. Fix: removed the unused
        # `os` and `json` imports.
        import requests
        import yaml
        from datetime import datetime

        try:
            # Clean handle (remove @ if present)
            clean_handle = handle.lstrip('@')

            # Use public Bluesky API (no auth required for public data)
            base_url = "https://public.api.bsky.app"

            # Get profile information
            try:
                profile_url = f"{base_url}/xrpc/app.bsky.actor.getProfile"
                profile_response = requests.get(profile_url, params={"actor": clean_handle}, timeout=10)
                profile_response.raise_for_status()
                profile_data = profile_response.json()
            except requests.exceptions.HTTPError as e:
                # 404 gets a friendly, agent-readable message; other HTTP
                # failures surface the status and body.
                if e.response.status_code == 404:
                    raise ValueError(f"Profile @{clean_handle} not found")
                raise RuntimeError(f"HTTP error {e.response.status_code}: {e.response.text}")
            except requests.exceptions.RequestException as e:
                raise RuntimeError(f"Network error: {str(e)}")
            except Exception as e:
                raise RuntimeError(f"Unexpected error fetching profile: {str(e)}")

            # Get recent posts feed
            try:
                feed_url = f"{base_url}/xrpc/app.bsky.feed.getAuthorFeed"
                feed_response = requests.get(feed_url, params={
                    "actor": clean_handle,
                    "limit": min(max_posts, 50)  # API limit
                }, timeout=10)
                feed_response.raise_for_status()
                feed_data = feed_response.json()
            except Exception as e:
                # Continue with empty feed if posts can't be fetched
                feed_data = {"feed": []}

            # Build research data structure
            research_data = {
                "profile_research": {
                    "handle": f"@{clean_handle}",
                    "timestamp": datetime.now().isoformat(),
                    "profile": profile_data,
                    "author_feed": feed_data
                }
            }

            # Fields to strip for cleaner output
            strip_fields = [
                "cid", "rev", "did", "uri", "langs", "threadgate", "py_type",
                "labels", "facets", "avatar", "viewer", "indexed_at", "indexedAt",
                "tags", "associated", "thread_context", "image", "aspect_ratio",
                "alt", "thumb", "fullsize", "root", "parent", "created_at",
                "createdAt", "verification", "embedding_disabled", "thread_muted",
                "reply_disabled", "pinned", "like", "repost", "blocked_by",
                "blocking", "blocking_by_list", "followed_by", "following",
                "known_followers", "muted", "muted_by_list", "root_author_like",
                "embed", "entities", "reason", "feedContext"
            ]

            # Remove unwanted fields by traversing the data structure
            def remove_fields(obj, fields_to_remove):
                if isinstance(obj, dict):
                    return {k: remove_fields(v, fields_to_remove)
                            for k, v in obj.items()
                            if k not in fields_to_remove}
                elif isinstance(obj, list):
                    return [remove_fields(item, fields_to_remove) for item in obj]
                else:
                    return obj

            # Clean the data
            cleaned_data = remove_fields(research_data, strip_fields)

            # Convert to YAML for better readability
            return yaml.dump(cleaned_data, default_flow_style=False, allow_unicode=True)

        except ValueError as e:
            # User-friendly errors
            raise ValueError(str(e))
        except RuntimeError as e:
            # Network/API errors
            raise RuntimeError(str(e))
        except yaml.YAMLError as e:
            # YAML conversion errors
            raise RuntimeError(f"Error formatting output: {str(e)}")
        except Exception as e:
            # Catch-all for unexpected errors
            raise RuntimeError(f"Unexpected error researching profile {handle}: {str(e)}")

    # Create or update the tool using upsert
    tool = client.tools.upsert_from_function(
        func=research_bluesky_profile,
        tags=["bluesky", "profile", "research"]
    )

    logger.info(f"Created tool: {tool.name} (ID: {tool.id})")
    return tool
def create_block_management_tools(client: Letta):
    """Create tools for attaching and detaching user blocks.

    Registers three Letta tools — attach_user_block, detach_user_block,
    update_user_block — and returns them as a tuple.

    NOTE(review): all three inner tools look up the agent by the hard-coded
    name "profile-researcher". If these tools are attached to another agent
    (e.g. the void agent), attach/detach will still operate on the
    profile-researcher agent's blocks — confirm whether that is intended.
    """

    def attach_user_block(handle: str) -> str:
        """
        Create (if needed) and attach a user-specific memory block for a Bluesky user.

        Args:
            handle: The Bluesky handle (e.g., 'cameron.pfiffer.org' or '@cameron.pfiffer.org')

        Returns:
            Status message about the block attachment
        """
        # Imports are local: Letta serializes this source and runs it in a sandbox.
        import os
        from letta_client import Letta

        try:
            # Clean handle for block label
            clean_handle = handle.lstrip('@').replace('.', '_').replace('-', '_')
            block_label = f"user_{clean_handle}"

            # Initialize Letta client
            letta_client = Letta(token=os.environ["LETTA_API_KEY"])

            # Get current agent (this tool is being called by)
            # We need to find the agent that's calling this tool
            # For now, we'll find the profile-researcher agent
            agents = letta_client.agents.list(name="profile-researcher")
            if not agents:
                return "Error: Could not find profile-researcher agent"

            agent = agents[0]

            # Check if block already exists and is attached
            agent_blocks = letta_client.agents.blocks.list(agent_id=agent.id)
            for block in agent_blocks:
                if block.label == block_label:
                    return f"User block for @{handle} is already attached (label: {block_label})"

            # Create or get the user block
            existing_blocks = letta_client.blocks.list(label=block_label)

            if existing_blocks:
                user_block = existing_blocks[0]
                action = "Retrieved existing"
            else:
                user_block = letta_client.blocks.create(
                    label=block_label,
                    value=f"User information for @{handle} will be stored here as I learn about them through profile research and interactions.",
                    description=f"Stores detailed information about Bluesky user @{handle}, including their interests, posting patterns, personality traits, and interaction history."
                )
                action = "Created new"

            # Attach block to agent
            letta_client.agents.blocks.attach(agent_id=agent.id, block_id=user_block.id)

            return f"{action} and attached user block for @{handle} (label: {block_label}). I can now store and access information about this user."

        except Exception as e:
            # Errors are returned as strings so the agent can read them.
            return f"Error attaching user block for @{handle}: {str(e)}"

    def detach_user_block(handle: str) -> str:
        """
        Detach a user-specific memory block from the agent.

        Args:
            handle: The Bluesky handle (e.g., 'cameron.pfiffer.org' or '@cameron.pfiffer.org')

        Returns:
            Status message about the block detachment
        """
        # Imports are local: Letta serializes this source and runs it in a sandbox.
        import os
        from letta_client import Letta

        try:
            # Clean handle for block label
            clean_handle = handle.lstrip('@').replace('.', '_').replace('-', '_')
            block_label = f"user_{clean_handle}"

            # Initialize Letta client
            letta_client = Letta(token=os.environ["LETTA_API_KEY"])

            # Get current agent
            agents = letta_client.agents.list(name="profile-researcher")
            if not agents:
                return "Error: Could not find profile-researcher agent"

            agent = agents[0]

            # Find the block to detach
            agent_blocks = letta_client.agents.blocks.list(agent_id=agent.id)
            user_block = None
            for block in agent_blocks:
                if block.label == block_label:
                    user_block = block
                    break

            if not user_block:
                return f"User block for @{handle} is not currently attached (label: {block_label})"

            # Detach block from agent (detaching does not delete the block).
            letta_client.agents.blocks.detach(agent_id=agent.id, block_id=user_block.id)

            return f"Detached user block for @{handle} (label: {block_label}). The block still exists and can be reattached later."

        except Exception as e:
            return f"Error detaching user block for @{handle}: {str(e)}"

    def update_user_block(handle: str, new_content: str) -> str:
        """
        Update the content of a user-specific memory block.

        Args:
            handle: The Bluesky handle (e.g., 'cameron.pfiffer.org' or '@cameron.pfiffer.org')
            new_content: New content to store in the user block

        Returns:
            Status message about the block update
        """
        # Imports are local: Letta serializes this source and runs it in a sandbox.
        import os
        from letta_client import Letta

        try:
            # Clean handle for block label
            clean_handle = handle.lstrip('@').replace('.', '_').replace('-', '_')
            block_label = f"user_{clean_handle}"

            # Initialize Letta client
            letta_client = Letta(token=os.environ["LETTA_API_KEY"])

            # Find the block (works on any agent's blocks; replaces the
            # whole value rather than appending).
            existing_blocks = letta_client.blocks.list(label=block_label)
            if not existing_blocks:
                return f"User block for @{handle} does not exist (label: {block_label}). Use attach_user_block first."

            user_block = existing_blocks[0]

            # Update block content
            letta_client.blocks.modify(
                block_id=user_block.id,
                value=new_content
            )

            return f"Updated user block for @{handle} (label: {block_label}) with new content."

        except Exception as e:
            return f"Error updating user block for @{handle}: {str(e)}"

    # Create the tools
    attach_tool = client.tools.upsert_from_function(
        func=attach_user_block,
        tags=["memory", "user", "attach"]
    )

    detach_tool = client.tools.upsert_from_function(
        func=detach_user_block,
        tags=["memory", "user", "detach"]
    )

    update_tool = client.tools.upsert_from_function(
        func=update_user_block,
        tags=["memory", "user", "update"]
    )

    logger.info(f"Created block management tools: {attach_tool.name}, {detach_tool.name}, {update_tool.name}")
    return attach_tool, detach_tool, update_tool
def create_user_block_for_handle(client: Letta, handle: str):
    """Create a user-specific memory block that can be manually attached to agents."""
    # Block labels must be identifier-safe: drop the '@' and swap '.'/'-' for '_'.
    sanitized = handle.lstrip('@').replace('.', '_').replace('-', '_')
    label = f"user_{sanitized}"

    user_block = upsert_block(
        client,
        label=label,
        value=f"User information for @{handle} will be stored here as I learn about them through profile research and interactions.",
        description=f"Stores detailed information about Bluesky user @{handle}, including their interests, posting patterns, personality traits, and interaction history."
    )

    logger.info(f"Created user block for @{handle}: {label} (ID: {user_block.id})")
    return user_block
def create_profile_researcher_agent():
    """Create (upsert) the profile-researcher Letta agent.

    Registers the research and block-management tools, upserts the persona
    block, then upserts the agent wired to all of them.

    Returns:
        The upserted agent object.
    """

    # Create client (requires LETTA_API_KEY in the environment)
    client = Letta(token=os.environ["LETTA_API_KEY"])

    logger.info("Creating profile-researcher agent...")

    # Create custom tools first so their names can be referenced below
    research_tool = create_profile_research_tool(client)
    attach_tool, detach_tool, update_tool = create_block_management_tools(client)

    # Create persona block
    persona_block = upsert_block(
        client,
        label="profile-researcher-persona",
        value="""I am a Profile Researcher, an AI agent specialized in analyzing Bluesky user profiles and social media behavior. My purpose is to:

1. Research Bluesky user profiles thoroughly and objectively
2. Analyze posting patterns, interests, and engagement behaviors
3. Build comprehensive user understanding through data analysis
4. Create and manage user-specific memory blocks for individuals
5. Provide insights about user personality, interests, and social patterns

I approach research systematically:
- Use the research_bluesky_profile tool to examine profiles and recent posts
- Use attach_user_block to create and attach dedicated memory blocks for specific users
- Use update_user_block to store research findings in user-specific blocks
- Use detach_user_block when research is complete to free up memory space
- Analyze profile information (bio, follower counts, etc.)
- Study recent posts for themes, topics, and tone
- Identify posting frequency and engagement patterns
- Note interaction styles and communication preferences
- Track interests and expertise areas
- Observe social connections and community involvement

I maintain objectivity and respect privacy while building useful user models for personalized interactions. My typical workflow is: attach_user_block → research_bluesky_profile → update_user_block → detach_user_block.""",
        description="The persona and role definition for the profile researcher agent"
    )

    # Create the agent with persona block and custom tools
    profile_researcher = upsert_agent(
        client,
        name="profile-researcher",
        memory_blocks=[
            {
                "label": "research_notes",
                "value": "I will use this space to track ongoing research projects and findings across multiple users.",
                "limit": 8000,
                "description": "Working notes and cross-user insights from profile research activities"
            }
        ],
        block_ids=[persona_block.id],
        tags=["profile research", "bluesky", "user analysis"],
        model="openai/gpt-4o-mini",
        embedding="openai/text-embedding-3-small",
        description="An agent that researches Bluesky profiles and builds user understanding",
        project_id=PROJECT_ID,
        tools=[research_tool.name, attach_tool.name, detach_tool.name, update_tool.name]
    )

    logger.info(f"Profile researcher agent created: {profile_researcher.id}")
    return profile_researcher
def main():
    """Main function to create the profile researcher agent.

    Creates (or fetches) the profile-researcher agent, prints a summary of
    its capabilities, and logs the full traceback on failure.
    """
    try:
        agent = create_profile_researcher_agent()
        print("✅ Profile researcher agent created successfully!")
        print(f" Agent ID: {agent.id}")
        print(f" Agent Name: {agent.name}")
        print("\nThe agent has these capabilities:")
        print(" - research_bluesky_profile: Analyzes user profiles and recent posts")
        print(" - attach_user_block: Creates and attaches user-specific memory blocks")
        print(" - update_user_block: Updates content in user memory blocks")
        print(" - detach_user_block: Detaches user blocks when done")
        print("\nTo use the agent, send a message like:")
        print(" 'Please research @cameron.pfiffer.org, attach their user block, update it with findings, then detach it'")
        print("\nThe agent can now manage its own memory blocks dynamically!")

    except Exception as e:
        # logger.exception records the traceback; logger.error would drop it.
        logger.exception(f"Failed to create profile researcher agent: {e}")
        print(f"❌ Error: {e}")

if __name__ == "__main__":
    main()
+111
get_thread.py
···11+#!/usr/bin/env python3
22+"""
33+Centralized script for retrieving Bluesky post threads from URIs.
44+Includes YAML-ified string conversion for easy LLM parsing.
55+"""
66+77+import argparse
88+import sys
99+import logging
1010+from typing import Optional, Dict, Any
1111+import yaml
1212+from bsky_utils import default_login, thread_to_yaml_string
# Configure logging
# Root logger: INFO threshold with timestamped, module-tagged records.
# The --quiet CLI flag later raises this threshold to ERROR in main().
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("get_thread")
def get_thread_from_uri(uri: str) -> Optional[Dict[str, Any]]:
    """
    Fetch the full post thread for a Bluesky post URI.

    Args:
        uri: The Bluesky post URI (e.g., at://did:plc:xyz/app.bsky.feed.post/abc123)

    Returns:
        Thread data on success, or None when login/fetch fails (the error
        is logged rather than raised).
    """
    try:
        bsky = default_login()
        logger.info(f"Fetching thread for URI: {uri}")
        params = {'uri': uri, 'parent_height': 80, 'depth': 10}
        return bsky.app.bsky.feed.get_post_thread(params)
    except Exception as e:
        logger.error(f"Error retrieving thread for URI {uri}: {e}")
        return None
4242+4343+4444+# thread_to_yaml_string is now imported from bsky_utils
def main():
    """CLI entry point: fetch a thread by URI and emit it as YAML."""
    arg_parser = argparse.ArgumentParser(
        description="Retrieve and display Bluesky post threads",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python get_thread.py at://did:plc:xyz/app.bsky.feed.post/abc123
  python get_thread.py --raw at://did:plc:xyz/app.bsky.feed.post/abc123
  python get_thread.py --output thread.yaml at://did:plc:xyz/app.bsky.feed.post/abc123
        """
    )

    arg_parser.add_argument(
        "uri",
        help="Bluesky post URI to retrieve thread for"
    )
    arg_parser.add_argument(
        "--raw",
        action="store_true",
        help="Include all metadata fields (don't strip for LLM parsing)"
    )
    arg_parser.add_argument(
        "--output", "-o",
        help="Output file to write YAML to (default: stdout)"
    )
    arg_parser.add_argument(
        "--quiet", "-q",
        action="store_true",
        help="Suppress info logging"
    )

    opts = arg_parser.parse_args()

    # Quiet mode: only errors reach the console.
    if opts.quiet:
        logging.getLogger().setLevel(logging.ERROR)

    thread = get_thread_from_uri(opts.uri)
    if thread is None:
        logger.error("Failed to retrieve thread")
        sys.exit(1)

    # Strip metadata unless the caller asked for the raw structure.
    yaml_text = thread_to_yaml_string(thread, strip_metadata=not opts.raw)

    # No output file → print to stdout and finish.
    if not opts.output:
        print(yaml_text)
        return

    try:
        with open(opts.output, 'w', encoding='utf-8') as fh:
            fh.write(yaml_text)
        logger.info(f"Thread saved to {opts.output}")
    except Exception as e:
        logger.error(f"Error writing to file {opts.output}: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
+63
show_agent_capabilities.py
···11+#!/usr/bin/env python3
22+"""
33+Show the current capabilities of both agents.
44+"""
55+66+import os
77+from letta_client import Letta
def _print_agent_summary(client, heading, agent):
    """Print one agent's ID, attached tools, and memory blocks under *heading*."""
    print(f"\n{heading}")
    print(f" ID: {agent.id}")
    print(f" Name: {agent.name}")

    tools = client.agents.tools.list(agent_id=agent.id)
    print(f" Tools ({len(tools)}):")
    for tool in tools:
        print(f" - {tool.name}")

    blocks = client.agents.blocks.list(agent_id=agent.id)
    print(f" Memory Blocks ({len(blocks)}):")
    for block in blocks:
        print(f" - {block.label}")

def show_agent_capabilities():
    """Display the capabilities of both agents (profile-researcher and void)."""

    client = Letta(token=os.environ["LETTA_API_KEY"])

    print("🤖 LETTA AGENT CAPABILITIES")
    print("=" * 50)

    # Profile Researcher Agent
    researchers = client.agents.list(name="profile-researcher")
    if researchers:
        _print_agent_summary(client, "📊 PROFILE RESEARCHER AGENT", researchers[0])

    # Void Agent
    voids = client.agents.list(name="void")
    if voids:
        _print_agent_summary(client, "🌌 VOID AGENT", voids[0])

    print("\n🔄 WORKFLOW")
    print(" 1. Profile Researcher: attach_user_block → research_bluesky_profile → update_user_block → detach_user_block")
    print(" 2. Void Agent: Can attach/detach same user blocks for personalized interactions")
    print(" 3. Shared Memory: Both agents can access the same user-specific blocks")

    print("\n💡 USAGE EXAMPLES")
    print(" Profile Researcher: 'Research @cameron.pfiffer.org and store findings'")
    print(" Void Agent: 'Attach user block for cameron.pfiffer.org before responding'")

if __name__ == "__main__":
    show_agent_capabilities()
+93
utils.py
···11+from letta_client import Letta
22+from typing import Optional
def upsert_block(letta: Letta, label: str, value: str, **kwargs):
    """
    Ensure a block with this label exists.

    Args:
        letta: Letta client.
        label: Block label; must identify a unique block.
        value: Block content.
        **kwargs: Extra fields forwarded to the create/modify API call.
            The special key 'update' (bool, default False) controls whether
            an existing block is modified; it is consumed here and never
            forwarded to the API.

    Returns:
        The created block, the updated block (when update=True), or the
        existing block unchanged.

    Raises:
        Exception: If more than one block shares the label.
    """
    # Consume the control flag up front so it is never forwarded to the API
    # (previously it leaked into blocks.create via **kwargs).
    update = kwargs.pop('update', False)

    blocks = letta.blocks.list(label=label)

    # No match: create a fresh block.
    if not blocks:
        return letta.blocks.create(
            label=label,
            value=value,
            **kwargs
        )

    if len(blocks) > 1:
        raise Exception(f"{len(blocks)} blocks by the label '{label}' retrieved, label must identify a unique block")

    existing_block = blocks[0]

    if update:
        return letta.blocks.modify(
            block_id=existing_block.id,
            label=label,
            value=value,
            **kwargs
        )

    # Exists and update not requested: return as-is.
    return existing_block
def upsert_agent(letta: Letta, name: str, **kwargs):
    """
    Ensure an agent with this name exists.

    Args:
        letta: Letta client.
        name: Agent name; must identify a unique agent.
        **kwargs: Extra fields forwarded to the create/modify API call.
            The special key 'update' (bool, default False) controls whether
            an existing agent is modified; it is consumed here and never
            forwarded to the API.

    Returns:
        The created agent, the updated agent (when update=True), or the
        existing agent unchanged.

    Raises:
        Exception: If more than one agent shares the name.
    """
    # Consume the control flag up front so it is never forwarded to the API
    # (previously it leaked into agents.create via **kwargs).
    update = kwargs.pop('update', False)

    agents = letta.agents.list(name=name)

    # No match: create a fresh agent.
    if not agents:
        return letta.agents.create(
            name=name,
            **kwargs
        )

    if len(agents) > 1:
        # Fixed: the original interpolated an undefined variable `label`
        # here, raising NameError instead of the intended message.
        raise Exception(f"{len(agents)} agents by the name '{name}' retrieved, name must identify a unique agent")

    existing_agent = agents[0]

    if update:
        return letta.agents.modify(
            agent_id=existing_agent.id,
            **kwargs
        )

    # Exists and update not requested: return as-is.
    return existing_agent
8282+8383+8484+8585+8686+8787+8888+8989+9090+9191+9292+9393+