A simple tool that lets you scrape Twitter accounts and crosspost their tweets to Bluesky accounts. Comes with a CLI and a web app for managing profiles, and supports images, videos, link embeds, and threads.

Merge pull request #1 from j4ckxyz/copilot/update-tweet-interface-user-info

Authored by jack and committed by GitHub.
d862e1bf b5031d87

+470 -351
+77 -44
src/ai-manager.ts
··· 2 2 import axios from 'axios'; 3 3 import { getConfig } from './config-manager.js'; 4 4 5 - export async function generateAltText(buffer: Buffer, mimeType: string, contextText: string): Promise<string | undefined> { 5 + export async function generateAltText( 6 + buffer: Buffer, 7 + mimeType: string, 8 + contextText: string, 9 + ): Promise<string | undefined> { 6 10 const config = getConfig(); 7 - 11 + 8 12 // 1. Determine Provider and Credentials 9 13 // Priority: AI Config > Legacy Gemini Config > Environment Variables 10 - 11 - let provider = config.ai?.provider || 'gemini'; 14 + 15 + const provider = config.ai?.provider || 'gemini'; 12 16 let apiKey = config.ai?.apiKey; 13 17 let model = config.ai?.model; 14 - let baseUrl = config.ai?.baseUrl; 18 + const baseUrl = config.ai?.baseUrl; 15 19 16 20 // Fallbacks for Environment Variables 17 21 if (!apiKey) { ··· 23 27 24 28 // Fallback for Gemini specific legacy env var if provider is implicitly gemini 25 29 if (!apiKey && provider === 'gemini') { 26 - apiKey = process.env.GEMINI_API_KEY; 30 + apiKey = process.env.GEMINI_API_KEY; 27 31 } 28 32 29 33 // API Key is mandatory for Gemini and Anthropic 30 34 if (!apiKey && (provider === 'gemini' || provider === 'anthropic')) { 31 - return undefined; 35 + return undefined; 32 36 } 33 37 34 38 // Default Models ··· 48 52 return await callOpenAICompatible(apiKey, model || 'gpt-4o', baseUrl, buffer, mimeType, contextText); 49 53 case 'anthropic': 50 54 // apiKey is guaranteed by check above 51 - return await callAnthropic(apiKey!, model || 'claude-3-5-sonnet-20241022', baseUrl, buffer, mimeType, contextText); 55 + return await callAnthropic( 56 + apiKey!, 57 + model || 'claude-3-5-sonnet-20241022', 58 + baseUrl, 59 + buffer, 60 + mimeType, 61 + contextText, 62 + ); 52 63 default: 53 64 console.warn(`[AI] ⚠️ Unknown provider: ${provider}`); 54 65 return undefined; ··· 59 70 } 60 71 } 61 72 62 - async function callGemini(apiKey: string, modelName: string, buffer: Buffer, 
mimeType: string, contextText: string): Promise<string | undefined> { 73 + async function callGemini( 74 + apiKey: string, 75 + modelName: string, 76 + buffer: Buffer, 77 + mimeType: string, 78 + contextText: string, 79 + ): Promise<string | undefined> { 63 80 const genAI = new GoogleGenerativeAI(apiKey); 64 81 const model = genAI.getGenerativeModel({ model: modelName }); 65 82 ··· 72 89 { 73 90 inlineData: { 74 91 data: buffer.toString('base64'), 75 - mimeType 76 - } 77 - } 92 + mimeType, 93 + }, 94 + }, 78 95 ]); 79 96 const response = await result.response; 80 97 return response.text(); 81 98 } 82 99 83 - async function callOpenAICompatible(apiKey: string | undefined, model: string, baseUrl: string | undefined, buffer: Buffer, mimeType: string, contextText: string): Promise<string | undefined> { 84 - const url = baseUrl ? `${baseUrl.replace(/\/+$/, '')}/chat/completions` : 'https://api.openai.com/v1/chat/completions'; 85 - 100 + async function callOpenAICompatible( 101 + apiKey: string | undefined, 102 + model: string, 103 + baseUrl: string | undefined, 104 + buffer: Buffer, 105 + mimeType: string, 106 + contextText: string, 107 + ): Promise<string | undefined> { 108 + const url = baseUrl 109 + ? `${baseUrl.replace(/\/+$/, '')}/chat/completions` 110 + : 'https://api.openai.com/v1/chat/completions'; 111 + 86 112 const base64Image = `data:${mimeType};base64,${buffer.toString('base64')}`; 87 113 88 114 const payload = { 89 115 model: model, 90 116 messages: [ 91 117 { 92 - role: "user", 118 + role: 'user', 93 119 content: [ 94 120 { 95 - type: "text", 96 - text: `Describe this image for alt text. Be concise but descriptive. Context from the tweet text: "${contextText}".` 121 + type: 'text', 122 + text: `Describe this image for alt text. Be concise but descriptive. 
Context from the tweet text: "${contextText}".`, 97 123 }, 98 124 { 99 - type: "image_url", 125 + type: 'image_url', 100 126 image_url: { 101 - url: base64Image 102 - } 103 - } 104 - ] 105 - } 127 + url: base64Image, 128 + }, 129 + }, 130 + ], 131 + }, 106 132 ], 107 - max_tokens: 300 133 + max_tokens: 300, 108 134 }; 109 135 110 136 const headers: Record<string, string> = { 111 - 'Content-Type': 'application/json' 137 + 'Content-Type': 'application/json', 112 138 }; 113 139 114 140 if (apiKey) { 115 - headers['Authorization'] = `Bearer ${apiKey}`; 141 + headers['Authorization'] = `Bearer ${apiKey}`; 116 142 } 117 143 118 144 // OpenRouter specific headers (optional but good practice) 119 145 if (url.includes('openrouter.ai')) { 120 - headers['HTTP-Referer'] = 'https://github.com/tweets-2-bsky'; 121 - headers['X-Title'] = 'Tweets to Bluesky'; 146 + headers['HTTP-Referer'] = 'https://github.com/tweets-2-bsky'; 147 + headers['X-Title'] = 'Tweets to Bluesky'; 122 148 } 123 149 124 150 const response = await axios.post(url, payload, { headers }); ··· 126 152 return response.data.choices[0]?.message?.content || undefined; 127 153 } 128 154 129 - async function callAnthropic(apiKey: string, model: string, baseUrl: string | undefined, buffer: Buffer, mimeType: string, contextText: string): Promise<string | undefined> { 155 + async function callAnthropic( 156 + apiKey: string, 157 + model: string, 158 + baseUrl: string | undefined, 159 + buffer: Buffer, 160 + mimeType: string, 161 + contextText: string, 162 + ): Promise<string | undefined> { 130 163 const url = baseUrl ? 
`${baseUrl.replace(/\/+$/, '')}/v1/messages` : 'https://api.anthropic.com/v1/messages'; 131 - 164 + 132 165 const base64Data = buffer.toString('base64'); 133 166 134 167 const payload = { ··· 136 169 max_tokens: 300, 137 170 messages: [ 138 171 { 139 - role: "user", 172 + role: 'user', 140 173 content: [ 141 174 { 142 - type: "image", 175 + type: 'image', 143 176 source: { 144 - type: "base64", 177 + type: 'base64', 145 178 media_type: mimeType, 146 - data: base64Data 147 - } 179 + data: base64Data, 180 + }, 148 181 }, 149 182 { 150 - type: "text", 151 - text: `Describe this image for alt text. Be concise but descriptive. Context from the tweet text: "${contextText}".` 152 - } 153 - ] 154 - } 155 - ] 183 + type: 'text', 184 + text: `Describe this image for alt text. Be concise but descriptive. Context from the tweet text: "${contextText}".`, 185 + }, 186 + ], 187 + }, 188 + ], 156 189 }; 157 190 158 191 const response = await axios.post(url, payload, { 159 192 headers: { 160 193 'x-api-key': apiKey, 161 194 'anthropic-version': '2023-06-01', 162 - 'Content-Type': 'application/json' 163 - } 195 + 'Content-Type': 'application/json', 196 + }, 164 197 }); 165 198 166 199 return response.data.content[0]?.text || undefined;
+45 -39
src/cli.ts
··· 1 1 import { Command } from 'commander'; 2 2 import inquirer from 'inquirer'; 3 - import { addMapping, getConfig, removeMapping, saveConfig, updateTwitterConfig, type AIConfig } from './config-manager.js'; 3 + import { addMapping, getConfig, removeMapping, saveConfig, updateTwitterConfig } from './config-manager.js'; 4 4 5 5 const program = new Command(); 6 6 ··· 15 15 .action(async () => { 16 16 const config = getConfig(); 17 17 const currentAi = config.ai || { provider: 'gemini' }; 18 - 18 + 19 19 // Check legacy gemini key if not in new config 20 20 if (!config.ai && config.geminiApiKey) { 21 - currentAi.apiKey = config.geminiApiKey; 21 + currentAi.apiKey = config.geminiApiKey; 22 22 } 23 23 24 24 const answers = await inquirer.prompt([ ··· 30 30 { name: 'Google Gemini (Default)', value: 'gemini' }, 31 31 { name: 'OpenAI / OpenRouter', value: 'openai' }, 32 32 { name: 'Anthropic (Claude)', value: 'anthropic' }, 33 - { name: 'Custom (OpenAI Compatible)', value: 'custom' } 33 + { name: 'Custom (OpenAI Compatible)', value: 'custom' }, 34 34 ], 35 - default: currentAi.provider 35 + default: currentAi.provider, 36 36 }, 37 37 { 38 38 type: 'input', ··· 40 40 message: 'Enter API Key (optional for some custom providers):', 41 41 default: currentAi.apiKey, 42 42 validate: (input: string, answers: any) => { 43 - if (['gemini', 'anthropic'].includes(answers.provider) && !input) { 44 - return 'API Key is required for this provider.'; 45 - } 46 - return true; 47 - } 43 + if (['gemini', 'anthropic'].includes(answers.provider) && !input) { 44 + return 'API Key is required for this provider.'; 45 + } 46 + return true; 47 + }, 48 48 }, 49 49 { 50 50 type: 'input', ··· 57 57 name: 'baseUrl', 58 58 message: 'Enter Base URL (optional, e.g. 
for OpenRouter):', 59 59 default: currentAi.baseUrl, 60 - when: (answers) => ['openai', 'anthropic', 'custom'].includes(answers.provider) 61 - } 60 + when: (answers) => ['openai', 'anthropic', 'custom'].includes(answers.provider), 61 + }, 62 62 ]); 63 63 64 64 config.ai = { 65 - provider: answers.provider, 66 - apiKey: answers.apiKey, 67 - model: answers.model || undefined, 68 - baseUrl: answers.baseUrl || undefined 65 + provider: answers.provider, 66 + apiKey: answers.apiKey, 67 + model: answers.model || undefined, 68 + baseUrl: answers.baseUrl || undefined, 69 69 }; 70 - 70 + 71 71 // Clear legacy key to avoid confusion 72 72 delete config.geminiApiKey; 73 - 73 + 74 74 saveConfig(config); 75 75 console.log('AI configuration updated!'); 76 76 }); ··· 125 125 default: 'https://bsky.social', 126 126 }, 127 127 ]); 128 - 129 - const usernames = answers.twitterUsernames.split(',').map((u: string) => u.trim()).filter((u: string) => u.length > 0); 130 - 128 + 129 + const usernames = answers.twitterUsernames 130 + .split(',') 131 + .map((u: string) => u.trim()) 132 + .filter((u: string) => u.length > 0); 133 + 131 134 addMapping({ 132 135 ...answers, 133 136 twitterUsernames: usernames, ··· 144 147 console.log('No mappings found.'); 145 148 return; 146 149 } 147 - 150 + 148 151 const { id } = await inquirer.prompt([ 149 152 { 150 153 type: 'list', ··· 186 189 }, 187 190 ]); 188 191 189 - const usernames = answers.twitterUsernames.split(',').map((u: string) => u.trim()).filter((u: string) => u.length > 0); 192 + const usernames = answers.twitterUsernames 193 + .split(',') 194 + .map((u: string) => u.trim()) 195 + .filter((u: string) => u.length > 0); 190 196 191 197 // Update the mapping directly 192 - const index = config.mappings.findIndex(m => m.id === id); 198 + const index = config.mappings.findIndex((m) => m.id === id); 193 199 const existingMapping = config.mappings[index]; 194 - 200 + 195 201 if (index !== -1 && existingMapping) { 196 - const updatedMapping = { 
197 - ...existingMapping, 198 - twitterUsernames: usernames, 199 - bskyIdentifier: answers.bskyIdentifier, 200 - bskyServiceUrl: answers.bskyServiceUrl, 201 - }; 202 - 203 - if (answers.bskyPassword && answers.bskyPassword.trim().length > 0) { 204 - updatedMapping.bskyPassword = answers.bskyPassword; 205 - } 206 - 207 - config.mappings[index] = updatedMapping; 208 - saveConfig(config); 209 - console.log('Mapping updated successfully!'); 202 + const updatedMapping = { 203 + ...existingMapping, 204 + twitterUsernames: usernames, 205 + bskyIdentifier: answers.bskyIdentifier, 206 + bskyServiceUrl: answers.bskyServiceUrl, 207 + }; 208 + 209 + if (answers.bskyPassword && answers.bskyPassword.trim().length > 0) { 210 + updatedMapping.bskyPassword = answers.bskyPassword; 211 + } 212 + 213 + config.mappings[index] = updatedMapping; 214 + saveConfig(config); 215 + console.log('Mapping updated successfully!'); 210 216 } 211 217 }); 212 218
+2 -2
src/config-manager.ts
··· 69 69 export function saveConfig(config: AppConfig): void { 70 70 // biome-ignore lint/suspicious/noExplicitAny: cleanup before save 71 71 const configToSave = { ...config } as any; 72 - 72 + 73 73 // Remove legacy field from saved file 74 74 configToSave.mappings = configToSave.mappings.map((m: any) => { 75 75 const { twitterUsername, ...rest } = m; ··· 94 94 const config = getConfig(); 95 95 const index = config.mappings.findIndex((m) => m.id === id); 96 96 const existing = config.mappings[index]; 97 - 97 + 98 98 if (index !== -1 && existing) { 99 99 config.mappings[index] = { ...existing, ...updates }; 100 100 saveConfig(config);
+13 -11
src/db.ts
··· 1 - import Database from 'better-sqlite3'; 2 - import path from 'node:path'; 3 1 import fs from 'node:fs'; 2 + import path from 'node:path'; 4 3 import { fileURLToPath } from 'node:url'; 4 + import Database from 'better-sqlite3'; 5 5 6 6 const __filename = fileURLToPath(import.meta.url); 7 7 const __dirname = path.dirname(__filename); ··· 17 17 db.pragma('journal_mode = WAL'); 18 18 19 19 // --- Migration Support --- 20 - const tableInfo = db.prepare("PRAGMA table_info(processed_tweets)").all() as any[]; 20 + const tableInfo = db.prepare('PRAGMA table_info(processed_tweets)').all() as any[]; 21 21 22 22 if (tableInfo.length > 0) { 23 - const hasBskyIdentifier = tableInfo.some(col => col.name === 'bsky_identifier'); 24 - 23 + const hasBskyIdentifier = tableInfo.some((col) => col.name === 'bsky_identifier'); 24 + 25 25 if (!hasBskyIdentifier) { 26 26 console.log('🔄 Upgrading database schema to support multiple accounts...'); 27 27 // SQLite doesn't support easy PK changes, so we recreate the table ··· 96 96 bsky_cid: row.bsky_cid, 97 97 bsky_root_uri: row.bsky_root_uri, 98 98 bsky_root_cid: row.bsky_root_cid, 99 - status: row.status 99 + status: row.status, 100 100 }; 101 101 }, 102 102 ··· 114 114 tweet.bsky_cid || null, 115 115 tweet.bsky_root_uri || null, 116 116 tweet.bsky_root_cid || null, 117 - tweet.status 117 + tweet.status, 118 118 ); 119 119 }, 120 120 ··· 128 128 cid: row.bsky_cid, 129 129 root: row.bsky_root_uri ? { uri: row.bsky_root_uri, cid: row.bsky_root_cid } : undefined, 130 130 migrated: row.status === 'migrated', 131 - skipped: row.status === 'skipped' 131 + skipped: row.status === 'skipped', 132 132 }; 133 133 } 134 134 return map; ··· 144 144 cid: row.bsky_cid, 145 145 root: row.bsky_root_uri ? 
{ uri: row.bsky_root_uri, cid: row.bsky_root_cid } : undefined, 146 146 migrated: row.status === 'migrated', 147 - skipped: row.status === 'skipped' 147 + skipped: row.status === 'skipped', 148 148 }; 149 149 } 150 150 return map; ··· 156 156 }, 157 157 158 158 repairUnknownIdentifiers(twitterUsername: string, bskyIdentifier: string) { 159 - const stmt = db.prepare('UPDATE processed_tweets SET bsky_identifier = ? WHERE bsky_identifier = "unknown" AND twitter_username = ?'); 159 + const stmt = db.prepare( 160 + 'UPDATE processed_tweets SET bsky_identifier = ? WHERE bsky_identifier = "unknown" AND twitter_username = ?', 161 + ); 160 162 stmt.run(bskyIdentifier.toLowerCase(), twitterUsername.toLowerCase()); 161 163 }, 162 164 163 165 clearAll() { 164 166 db.prepare('DELETE FROM processed_tweets').run(); 165 - } 167 + }, 166 168 };
+280 -210
src/index.ts
··· 7 7 import type { BlobRef } from '@atproto/api'; 8 8 import { TwitterClient } from '@steipete/bird/dist/lib/twitter-client.js'; 9 9 import axios from 'axios'; 10 + import * as cheerio from 'cheerio'; 10 11 import { Command } from 'commander'; 11 12 import * as francModule from 'franc-min'; 12 13 import iso6391 from 'iso-639-1'; 13 - import os from 'node:os'; 14 14 import puppeteer from 'puppeteer-core'; 15 - import * as cheerio from 'cheerio'; 16 15 import sharp from 'sharp'; 17 16 import { generateAltText } from './ai-manager.js'; 18 17 ··· 98 97 in_reply_to_status_id?: string; 99 98 in_reply_to_user_id_str?: string; 100 99 in_reply_to_user_id?: string; 100 + user?: { 101 + screen_name?: string; 102 + id_str?: string; 103 + }; 101 104 } 102 105 103 106 interface TwitterSearchResult { ··· 127 130 128 131 async function migrateJsonToSqlite() { 129 132 if (!fs.existsSync(PROCESSED_DIR)) return; 130 - 131 - const files = fs.readdirSync(PROCESSED_DIR).filter(f => f.endsWith('.json')); 133 + 134 + const files = fs.readdirSync(PROCESSED_DIR).filter((f) => f.endsWith('.json')); 132 135 if (files.length === 0) return; 133 136 134 137 console.log(`📦 Found ${files.length} legacy cache files. 
Migrating to SQLite...`); 135 138 const config = getConfig(); 136 - 139 + 137 140 for (const file of files) { 138 141 const username = file.replace('.json', '').toLowerCase(); 139 142 // Try to find a matching bskyIdentifier from config 140 - const mapping = config.mappings.find(m => m.twitterUsernames.map(u => u.toLowerCase()).includes(username)); 143 + const mapping = config.mappings.find((m) => m.twitterUsernames.map((u) => u.toLowerCase()).includes(username)); 141 144 const bskyIdentifier = mapping?.bskyIdentifier || 'unknown'; 142 145 143 146 try { 144 147 const filePath = path.join(PROCESSED_DIR, file); 145 148 const data = JSON.parse(fs.readFileSync(filePath, 'utf8')) as ProcessedTweetsMap; 146 - 149 + 147 150 for (const [twitterId, entry] of Object.entries(data)) { 148 151 dbService.saveTweet({ 149 152 twitter_id: twitterId, ··· 153 156 bsky_cid: entry.cid, 154 157 bsky_root_uri: entry.root?.uri, 155 158 bsky_root_cid: entry.root?.cid, 156 - status: entry.migrated ? 'migrated' : (entry.skipped ? 'skipped' : 'failed') 159 + status: entry.migrated ? 'migrated' : entry.skipped ? 
'skipped' : 'failed', 157 160 }); 158 161 } 159 162 // Move file to backup ··· 171 174 dbService.repairUnknownIdentifiers(username, mapping.bskyIdentifier); 172 175 } 173 176 } 174 - 177 + 175 178 console.log('✅ Migration complete.'); 176 179 } 177 180 ··· 179 182 return dbService.getTweetsByBskyIdentifier(bskyIdentifier); 180 183 } 181 184 182 - function saveProcessedTweet(twitterUsername: string, bskyIdentifier: string, twitterId: string, entry: ProcessedTweetEntry): void { 185 + function saveProcessedTweet( 186 + twitterUsername: string, 187 + bskyIdentifier: string, 188 + twitterId: string, 189 + entry: ProcessedTweetEntry, 190 + ): void { 183 191 dbService.saveTweet({ 184 192 twitter_id: twitterId, 185 193 twitter_username: twitterUsername.toLowerCase(), ··· 188 196 bsky_cid: entry.cid, 189 197 bsky_root_uri: entry.root?.uri, 190 198 bsky_root_cid: entry.root?.cid, 191 - status: entry.migrated || (entry.uri && entry.cid) ? 'migrated' : (entry.skipped ? 'skipped' : 'failed') 199 + status: entry.migrated || (entry.uri && entry.cid) ? 'migrated' : entry.skipped ? 
'skipped' : 'failed', 192 200 }); 193 201 } 194 202 ··· 219 227 mapped.in_reply_to_status_id_str = result.legacy.in_reply_to_status_id_str; 220 228 mapped.in_reply_to_user_id_str = result.legacy.in_reply_to_user_id_str; 221 229 } 230 + // Capture user info from the Twitter API response 231 + if (mapped && (result as any).core?.user_results?.result?.legacy) { 232 + const userLegacy = (result as any).core.user_results.result.legacy; 233 + mapped.user = { 234 + screen_name: userLegacy.screen_name, 235 + id_str: (result as any).core.user_results.result.rest_id, 236 + }; 237 + } 222 238 return mapped; 223 239 } 224 240 } ··· 229 245 function getTwitterClient() { 230 246 const config = getConfig(); 231 247 if (!config.twitter.authToken || !config.twitter.ct0) return null; 232 - 248 + 233 249 // Re-initialize if config changed or not yet initialized 234 - if (!twitter || 235 - currentTwitterCookies.authToken !== config.twitter.authToken || 236 - currentTwitterCookies.ct0 !== config.twitter.ct0) { 250 + if ( 251 + !twitter || 252 + currentTwitterCookies.authToken !== config.twitter.authToken || 253 + currentTwitterCookies.ct0 !== config.twitter.ct0 254 + ) { 237 255 twitter = new CustomTwitterClient({ 238 256 cookies: { 239 257 authToken: config.twitter.authToken, 240 258 ct0: config.twitter.ct0, 241 259 }, 242 260 }); 243 - currentTwitterCookies = { 244 - authToken: config.twitter.authToken, 245 - ct0: config.twitter.ct0 261 + currentTwitterCookies = { 262 + authToken: config.twitter.authToken, 263 + ct0: config.twitter.ct0, 246 264 }; 247 265 } 248 266 return twitter; ··· 316 334 const isGif = mimeType === 'image/gif'; 317 335 const isAnimation = isGif || isWebp; 318 336 319 - if ((buffer.length > MAX_SIZE && (mimeType.startsWith('image/') || mimeType === 'application/octet-stream')) || (isPng && buffer.length > MAX_SIZE)) { 337 + if ( 338 + (buffer.length > MAX_SIZE && (mimeType.startsWith('image/') || mimeType === 'application/octet-stream')) || 339 + (isPng && 
buffer.length > MAX_SIZE) 340 + ) { 320 341 console.log(`[UPLOAD] ⚖️ Image too large (${(buffer.length / 1024).toFixed(2)} KB). Optimizing...`); 321 342 try { 322 343 let image = sharp(buffer); ··· 330 351 while (currentBuffer.length > MAX_SIZE && attempts < 5) { 331 352 attempts++; 332 353 console.log(`[UPLOAD] 📉 Compression attempt ${attempts}: Width ${width}, Quality ${quality}...`); 333 - 354 + 334 355 if (isAnimation) { 335 - // For animations (GIF/WebP), we can only do so much without losing frames 336 - // Try to convert to WebP if it's a GIF, or optimize WebP 337 - image = sharp(buffer, { animated: true }); 338 - if (isGif) { 339 - // Convert GIF to WebP for better compression 340 - image = image.webp({ quality: Math.max(quality, 50), effort: 6 }); 341 - finalMimeType = 'image/webp'; 342 - } else { 343 - image = image.webp({ quality: Math.max(quality, 50), effort: 6 }); 344 - } 345 - // Resize if really big 346 - if (metadata.width && metadata.width > 800) { 347 - image = image.resize({ width: 800, withoutEnlargement: true }); 348 - } 356 + // For animations (GIF/WebP), we can only do so much without losing frames 357 + // Try to convert to WebP if it's a GIF, or optimize WebP 358 + image = sharp(buffer, { animated: true }); 359 + if (isGif) { 360 + // Convert GIF to WebP for better compression 361 + image = image.webp({ quality: Math.max(quality, 50), effort: 6 }); 362 + finalMimeType = 'image/webp'; 363 + } else { 364 + image = image.webp({ quality: Math.max(quality, 50), effort: 6 }); 365 + } 366 + // Resize if really big 367 + if (metadata.width && metadata.width > 800) { 368 + image = image.resize({ width: 800, withoutEnlargement: true }); 369 + } 349 370 } else { 350 - // Static images 351 - if (width > 1600) width = 1600; 352 - else if (attempts > 1) width = Math.floor(width * 0.8); 353 - 354 - quality = Math.max(50, quality - 10); 355 - 356 - image = sharp(buffer) 357 - .resize({ width, withoutEnlargement: true }) 358 - .jpeg({ quality, mozjpeg: 
true }); 359 - 360 - finalMimeType = 'image/jpeg'; 371 + // Static images 372 + if (width > 1600) width = 1600; 373 + else if (attempts > 1) width = Math.floor(width * 0.8); 374 + 375 + quality = Math.max(50, quality - 10); 376 + 377 + image = sharp(buffer).resize({ width, withoutEnlargement: true }).jpeg({ quality, mozjpeg: true }); 378 + 379 + finalMimeType = 'image/jpeg'; 361 380 } 362 - 381 + 363 382 currentBuffer = await image.toBuffer(); 364 383 if (currentBuffer.length <= MAX_SIZE) { 365 - finalBuffer = currentBuffer; 366 - console.log(`[UPLOAD] ✅ Optimized to ${(finalBuffer.length / 1024).toFixed(2)} KB`); 367 - break; 384 + finalBuffer = currentBuffer; 385 + console.log(`[UPLOAD] ✅ Optimized to ${(finalBuffer.length / 1024).toFixed(2)} KB`); 386 + break; 368 387 } 369 388 } 370 - 389 + 371 390 if (finalBuffer.length > MAX_SIZE) { 372 - console.warn(`[UPLOAD] ⚠️ Could not compress below limit. Current: ${(finalBuffer.length / 1024).toFixed(2)} KB. Upload might fail.`); 391 + console.warn( 392 + `[UPLOAD] ⚠️ Could not compress below limit. Current: ${(finalBuffer.length / 1024).toFixed(2)} KB. 
Upload might fail.`, 393 + ); 373 394 } 374 - 375 395 } catch (err) { 376 396 console.warn(`[UPLOAD] ⚠️ Optimization failed, attempting original upload:`, (err as Error).message); 377 397 finalBuffer = buffer; ··· 393 413 'C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe', 394 414 ]; 395 415 396 - const executablePath = browserPaths.find(p => fs.existsSync(p)); 397 - 416 + const executablePath = browserPaths.find((p) => fs.existsSync(p)); 417 + 398 418 if (!executablePath) { 399 419 console.warn(`[SCREENSHOT] ⏩ Skipping screenshot (no Chrome/Chromium found at common paths).`); 400 420 return null; ··· 438 458 `; 439 459 440 460 await page.setContent(html, { waitUntil: 'networkidle0' }); 441 - 461 + 442 462 // Wait for the twitter iframe to load and render 443 463 try { 444 464 await page.waitForSelector('iframe', { timeout: 10000 }); 445 465 // Small extra wait for images inside iframe 446 - await new Promise(r => setTimeout(r, 2000)); 466 + await new Promise((r) => setTimeout(r, 2000)); 447 467 } catch (e) { 448 468 console.warn(`[SCREENSHOT] ⚠️ Timeout waiting for tweet iframe, taking screenshot anyway.`); 449 469 } ··· 469 489 470 490 while (!blob) { 471 491 attempts++; 472 - const statusUrl = new URL("https://video.bsky.app/xrpc/app.bsky.video.getJobStatus"); 473 - statusUrl.searchParams.append("jobId", jobId); 492 + const statusUrl = new URL('https://video.bsky.app/xrpc/app.bsky.video.getJobStatus'); 493 + statusUrl.searchParams.append('jobId', jobId); 474 494 475 495 const statusResponse = await fetch(statusUrl); 476 496 if (!statusResponse.ok) { ··· 488 508 if (statusData.jobStatus.blob) { 489 509 blob = statusData.jobStatus.blob; 490 510 console.log(`[VIDEO] 🎉 Video processing complete! 
Blob ref obtained.`); 491 - } else if (state === "JOB_STATE_FAILED") { 492 - throw new Error(`Video processing failed: ${statusData.jobStatus.error || "Unknown error"}`); 511 + } else if (state === 'JOB_STATE_FAILED') { 512 + throw new Error(`Video processing failed: ${statusData.jobStatus.error || 'Unknown error'}`); 493 513 } else { 494 514 // Wait before next poll 495 515 await new Promise((resolve) => setTimeout(resolve, 5000)); ··· 497 517 498 518 if (attempts > 60) { 499 519 // ~5 minute timeout 500 - throw new Error("Video processing timed out after 5 minutes."); 520 + throw new Error('Video processing timed out after 5 minutes.'); 501 521 } 502 522 } 503 523 return blob!; ··· 507 527 try { 508 528 const response = await axios.get(url, { 509 529 headers: { 510 - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', 511 - 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8', 530 + 'User-Agent': 531 + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', 532 + Accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8', 512 533 'Accept-Language': 'en-US,en;q=0.9', 513 534 }, 514 535 timeout: 10000, 515 536 }); 516 - 537 + 517 538 const $ = cheerio.load(response.data); 518 539 const title = $('meta[property="og:title"]').attr('content') || $('title').text() || ''; 519 - const description = $('meta[property="og:description"]').attr('content') || $('meta[name="description"]').attr('content') || ''; 540 + const description = 541 + $('meta[property="og:description"]').attr('content') || $('meta[name="description"]').attr('content') || ''; 520 542 let thumbBlob: BlobRef | undefined; 521 543 522 544 let imageUrl = $('meta[property="og:image"]').attr('content'); 523 545 if (imageUrl) { 524 - if (!imageUrl.startsWith('http')) { 525 - 
const baseUrl = new URL(url); 526 - imageUrl = new URL(imageUrl, baseUrl.origin).toString(); 527 - } 528 - try { 529 - const { buffer, mimeType } = await downloadMedia(imageUrl); 530 - thumbBlob = await uploadToBluesky(agent, buffer, mimeType); 531 - } catch (e) { 532 - console.warn(`Failed to upload thumbnail for ${url}:`, e); 533 - } 546 + if (!imageUrl.startsWith('http')) { 547 + const baseUrl = new URL(url); 548 + imageUrl = new URL(imageUrl, baseUrl.origin).toString(); 549 + } 550 + try { 551 + const { buffer, mimeType } = await downloadMedia(imageUrl); 552 + thumbBlob = await uploadToBluesky(agent, buffer, mimeType); 553 + } catch (e) { 554 + console.warn(`Failed to upload thumbnail for ${url}:`, e); 555 + } 534 556 } 535 557 536 558 if (!title && !description) return null; 537 559 538 560 const external: any = { 539 - uri: url, 540 - title: title || url, 541 - description: description, 561 + uri: url, 562 + title: title || url, 563 + description: description, 542 564 }; 543 565 544 566 if (thumbBlob) { 545 - external.thumb = thumbBlob; 567 + external.thumb = thumbBlob; 546 568 } 547 569 548 570 return { 549 - $type: 'app.bsky.embed.external', 550 - external, 571 + $type: 'app.bsky.embed.external', 572 + external, 551 573 }; 552 - 553 574 } catch (err) { 554 575 console.warn(`Failed to fetch embed card for ${url}:`, err); 555 576 return null; ··· 557 578 } 558 579 559 580 async function uploadVideoToBluesky(agent: BskyAgent, buffer: Buffer, filename: string): Promise<BlobRef> { 560 - const sanitizedFilename = filename.split("?")[0] || "video.mp4"; 581 + const sanitizedFilename = filename.split('?')[0] || 'video.mp4'; 561 582 console.log( 562 583 `[VIDEO] 🟢 Starting upload process for ${sanitizedFilename} (${(buffer.length / 1024 / 1024).toFixed(2)} MB)`, 563 584 ); ··· 570 591 571 592 // didDoc might be present in repoDesc 572 593 const pdsService = (repoDesc as any).didDoc?.service?.find( 573 - (s: any) => s.id === "#atproto_pds" || s.type === "AtProtoPds", 
594 + (s: any) => s.id === '#atproto_pds' || s.type === 'AtProtoPds', 574 595 ); 575 596 const pdsUrl = pdsService?.serviceEndpoint; 576 - const pdsHost = pdsUrl ? new URL(pdsUrl).host : "bsky.social"; 597 + const pdsHost = pdsUrl ? new URL(pdsUrl).host : 'bsky.social'; 577 598 578 599 console.log(`[VIDEO] 🌐 PDS Host detected: ${pdsHost}`); 579 600 console.log(`[VIDEO] 🔑 Requesting service auth token for audience: did:web:${pdsHost}...`); 580 601 581 602 const { data: serviceAuth } = await agent.com.atproto.server.getServiceAuth({ 582 603 aud: `did:web:${pdsHost}`, 583 - lxm: "com.atproto.repo.uploadBlob", 604 + lxm: 'com.atproto.repo.uploadBlob', 584 605 exp: Math.floor(Date.now() / 1000) + 60 * 30, 585 606 }); 586 607 console.log(`[VIDEO] ✅ Service auth token obtained.`); ··· 588 609 const token = serviceAuth.token; 589 610 590 611 // 2. Upload to Video Service 591 - const uploadUrl = new URL("https://video.bsky.app/xrpc/app.bsky.video.uploadVideo"); 592 - uploadUrl.searchParams.append("did", agent.session!.did!); 593 - uploadUrl.searchParams.append("name", sanitizedFilename); 612 + const uploadUrl = new URL('https://video.bsky.app/xrpc/app.bsky.video.uploadVideo'); 613 + uploadUrl.searchParams.append('did', agent.session!.did!); 614 + uploadUrl.searchParams.append('name', sanitizedFilename); 594 615 595 616 console.log(`[VIDEO] 📤 Uploading to ${uploadUrl.href}...`); 596 617 const uploadResponse = await fetch(uploadUrl, { 597 - method: "POST", 618 + method: 'POST', 598 619 headers: { 599 620 Authorization: `Bearer ${token}`, 600 - "Content-Type": "video/mp4", 621 + 'Content-Type': 'video/mp4', 601 622 }, 602 623 body: buffer, 603 624 }); ··· 608 629 609 630 try { 610 631 const errorJson = JSON.parse(errorText); 611 - if (errorJson.error === "already_exists" && errorJson.jobId) { 632 + if (errorJson.error === 'already_exists' && errorJson.jobId) { 612 633 console.log(`[VIDEO] ♻️ Video already exists. 
Resuming with Job ID: ${errorJson.jobId}`); 613 634 return await pollForVideoProcessing(agent, errorJson.jobId); 614 635 } 615 - if (errorJson.error === "unconfirmed_email" || (errorJson.jobStatus && errorJson.jobStatus.error === "unconfirmed_email")) { 616 - console.error(`[VIDEO] 🛑 BLUESKY ERROR: Your email is unconfirmed. You MUST verify your email on Bluesky to upload videos.`); 617 - throw new Error("Bluesky Email Unconfirmed - Video Upload Rejected"); 636 + if ( 637 + errorJson.error === 'unconfirmed_email' || 638 + (errorJson.jobStatus && errorJson.jobStatus.error === 'unconfirmed_email') 639 + ) { 640 + console.error( 641 + `[VIDEO] 🛑 BLUESKY ERROR: Your email is unconfirmed. You MUST verify your email on Bluesky to upload videos.`, 642 + ); 643 + throw new Error('Bluesky Email Unconfirmed - Video Upload Rejected'); 618 644 } 619 645 } catch (e) { 620 646 // Not JSON or missing fields, proceed with throwing ··· 732 758 tweets: Tweet[], 733 759 dryRun = false, 734 760 ): Promise<void> { 761 + // Filter tweets to ensure they're actually from this user 762 + const filteredTweets = tweets.filter((t) => { 763 + const authorScreenName = t.user?.screen_name?.toLowerCase(); 764 + if (authorScreenName && authorScreenName !== twitterUsername.toLowerCase()) { 765 + console.log( 766 + `[${twitterUsername}] ⏩ Skipping tweet ${t.id_str || t.id} - author is @${t.user?.screen_name}, not @${twitterUsername}`, 767 + ); 768 + return false; 769 + } 770 + return true; 771 + }); 772 + 735 773 const processedTweets = loadProcessedTweets(bskyIdentifier); 736 - const toProcess = tweets.filter(t => !processedTweets[t.id_str || t.id || '']); 737 - 774 + const toProcess = filteredTweets.filter((t) => !processedTweets[t.id_str || t.id || '']); 775 + 738 776 if (toProcess.length === 0) { 739 777 console.log(`[${twitterUsername}] ✅ No new tweets to process for ${bskyIdentifier}.`); 740 778 return; 741 779 } 742 780 743 781 console.log(`[${twitterUsername}] 🚀 Processing 
${toProcess.length} new tweets for ${bskyIdentifier}...`); 744 - 745 - tweets.reverse(); 782 + 783 + filteredTweets.reverse(); 746 784 let count = 0; 747 - for (const tweet of tweets) { 785 + for (const tweet of filteredTweets) { 748 786 count++; 749 787 const tweetId = tweet.id_str || tweet.id; 750 788 if (!tweetId) continue; ··· 756 794 state: 'processing', 757 795 currentAccount: twitterUsername, 758 796 processedCount: count, 759 - totalCount: tweets.length, 797 + totalCount: filteredTweets.length, 760 798 message: `Processing tweet ${tweetId}`, 761 799 }); 762 800 ··· 791 829 .replace(/&gt;/g, '>') 792 830 .replace(/&quot;/g, '"') 793 831 .replace(/&#39;/g, "'"); 794 - 832 + 795 833 // 1. Link Expansion 796 834 console.log(`[${twitterUsername}] 🔗 Expanding links...`); 797 835 const urls = tweet.entities?.urls || []; ··· 806 844 const matches = text.match(tcoRegex) || []; 807 845 for (const tco of matches) { 808 846 // Avoid re-resolving if we already handled it via entities 809 - if (urls.some(u => u.url === tco)) continue; 847 + if (urls.some((u) => u.url === tco)) continue; 810 848 811 849 console.log(`[${twitterUsername}] 🔍 Resolving fallback link: ${tco}`); 812 850 const resolved = await expandUrl(tco); 813 851 if (resolved !== tco) { 814 - text = text.replace(tco, resolved); 815 - // Add to urls array so it can be used for card embedding later 816 - urls.push({ url: tco, expanded_url: resolved }); 852 + text = text.replace(tco, resolved); 853 + // Add to urls array so it can be used for card embedding later 854 + urls.push({ url: tco, expanded_url: resolved }); 817 855 } 818 856 } 819 857 ··· 831 869 mediaLinksToRemove.push(media.url); 832 870 if (media.expanded_url) mediaLinksToRemove.push(media.expanded_url); 833 871 } 834 - 872 + 835 873 let aspectRatio: AspectRatio | undefined; 836 874 if (media.sizes?.large) { 837 875 aspectRatio = { width: media.sizes.large.w, height: media.sizes.large.h }; ··· 850 888 console.log(`[${twitterUsername}] 📤 Uploading 
image to Bluesky...`); 851 889 updateAppStatus({ message: `Uploading image to Bluesky...` }); 852 890 const blob = await uploadToBluesky(agent, buffer, mimeType); 853 - 891 + 854 892 let altText = media.ext_alt_text; 855 893 if (!altText) { 856 - console.log(`[${twitterUsername}] 🤖 Generating alt text via Gemini...`); 857 - // Use original tweet text for context, not the modified/cleaned one 858 - altText = await generateAltText(buffer, mimeType, tweetText); 859 - if (altText) console.log(`[${twitterUsername}] ✅ Alt text generated: ${altText.substring(0, 50)}...`); 894 + console.log(`[${twitterUsername}] 🤖 Generating alt text via Gemini...`); 895 + // Use original tweet text for context, not the modified/cleaned one 896 + altText = await generateAltText(buffer, mimeType, tweetText); 897 + if (altText) console.log(`[${twitterUsername}] ✅ Alt text generated: ${altText.substring(0, 50)}...`); 860 898 } 861 899 862 900 images.push({ alt: altText || 'Image from Twitter', image: blob, aspectRatio }); ··· 877 915 } else if (media.type === 'video' || media.type === 'animated_gif') { 878 916 const variants = media.video_info?.variants || []; 879 917 const duration = media.video_info?.duration_millis || 0; 880 - 881 - if (duration > 180000) { // 3 minutes 882 - console.warn(`[${twitterUsername}] ⚠️ Video too long (${(duration / 1000).toFixed(1)}s). Fallback to link.`); 883 - const tweetUrl = `https://twitter.com/${twitterUsername}/status/${tweetId}`; 884 - if (!text.includes(tweetUrl)) text += `\n\nVideo: ${tweetUrl}`; 885 - continue; 918 + 919 + if (duration > 180000) { 920 + // 3 minutes 921 + console.warn(`[${twitterUsername}] ⚠️ Video too long (${(duration / 1000).toFixed(1)}s). 
Fallback to link.`); 922 + const tweetUrl = `https://twitter.com/${twitterUsername}/status/${tweetId}`; 923 + if (!text.includes(tweetUrl)) text += `\n\nVideo: ${tweetUrl}`; 924 + continue; 886 925 } 887 926 888 927 const mp4s = variants ··· 897 936 console.log(`[${twitterUsername}] 📥 Downloading video: ${videoUrl}`); 898 937 updateAppStatus({ message: `Downloading video: ${path.basename(videoUrl)}` }); 899 938 const { buffer, mimeType } = await downloadMedia(videoUrl); 900 - 939 + 901 940 if (buffer.length <= 90 * 1024 * 1024) { 902 941 const filename = videoUrl.split('/').pop() || 'video.mp4'; 903 942 updateAppStatus({ message: `Uploading video to Bluesky...` }); ··· 906 945 console.log(`[${twitterUsername}] ✅ Video upload process complete.`); 907 946 break; // Prioritize first video 908 947 } 909 - 910 - console.warn(`[${twitterUsername}] ⚠️ Video too large (${(buffer.length / 1024 / 1024).toFixed(2)}MB). Fallback to link.`); 948 + 949 + console.warn( 950 + `[${twitterUsername}] ⚠️ Video too large (${(buffer.length / 1024 / 1024).toFixed(2)}MB). 
Fallback to link.`, 951 + ); 911 952 const tweetUrl = `https://twitter.com/${twitterUsername}/status/${tweetId}`; 912 953 if (!text.includes(tweetUrl)) text += `\n\nVideo: ${tweetUrl}`; 913 954 } catch (err) { ··· 938 979 } else { 939 980 const quoteUrlEntity = urls.find((u) => u.expanded_url?.includes(quoteId)); 940 981 const qUrl = quoteUrlEntity?.expanded_url || `https://twitter.com/i/status/${quoteId}`; 941 - 982 + 942 983 // Check if it's a self-quote (same user) 943 - const isSelfQuote = qUrl.toLowerCase().includes(`twitter.com/${twitterUsername.toLowerCase()}/`) || 944 - qUrl.toLowerCase().includes(`x.com/${twitterUsername.toLowerCase()}/`); 945 - 984 + const isSelfQuote = 985 + qUrl.toLowerCase().includes(`twitter.com/${twitterUsername.toLowerCase()}/`) || 986 + qUrl.toLowerCase().includes(`x.com/${twitterUsername.toLowerCase()}/`); 987 + 946 988 if (!isSelfQuote) { 947 989 externalQuoteUrl = qUrl; 948 990 console.log(`[${twitterUsername}] 🔗 Quoted tweet is external: ${externalQuoteUrl}`); 949 - 991 + 950 992 // Try to capture screenshot for external QTs if we have space for images 951 993 if (images.length < 4 && !videoBlob) { 952 994 const ssBuffer = await captureTweetScreenshot(externalQuoteUrl); ··· 964 1006 } 965 1007 } 966 1008 } else if (images.length === 0 && !videoBlob) { 967 - // If no media and no quote, check for external links to embed 968 - // We prioritize the LAST link found as it's often the main content 969 - const potentialLinks = urls 970 - .map(u => u.expanded_url) 971 - .filter(u => u && !u.includes('twitter.com') && !u.includes('x.com')) as string[]; 972 - 973 - if (potentialLinks.length > 0) { 974 - const linkToEmbed = potentialLinks[potentialLinks.length - 1]; 975 - if (linkToEmbed) { 976 - console.log(`[${twitterUsername}] 🃏 Fetching link card for: ${linkToEmbed}`); 977 - linkCard = await fetchEmbedUrlCard(agent, linkToEmbed); 978 - } 1009 + // If no media and no quote, check for external links to embed 1010 + // We prioritize the 
LAST link found as it's often the main content 1011 + const potentialLinks = urls 1012 + .map((u) => u.expanded_url) 1013 + .filter((u) => u && !u.includes('twitter.com') && !u.includes('x.com')) as string[]; 1014 + 1015 + if (potentialLinks.length > 0) { 1016 + const linkToEmbed = potentialLinks[potentialLinks.length - 1]; 1017 + if (linkToEmbed) { 1018 + console.log(`[${twitterUsername}] 🃏 Fetching link card for: ${linkToEmbed}`); 1019 + linkCard = await fetchEmbedUrlCard(agent, linkToEmbed); 979 1020 } 1021 + } 980 1022 } 981 1023 982 1024 // Only append link for external quotes IF we couldn't natively embed it OR screenshot it 983 - const hasScreenshot = images.some(img => img.alt.startsWith('Quote Tweet:')); 1025 + const hasScreenshot = images.some((img) => img.alt.startsWith('Quote Tweet:')); 984 1026 if (externalQuoteUrl && !quoteEmbed && !hasScreenshot && !text.includes(externalQuoteUrl)) { 985 1027 text += `\n\nQT: ${externalQuoteUrl}`; 986 1028 } ··· 988 1030 // 4. Threading and Posting 989 1031 const chunks = splitText(text); 990 1032 console.log(`[${twitterUsername}] 📝 Splitting text into ${chunks.length} chunks.`); 991 - 1033 + 992 1034 let lastPostInfo: ProcessedTweetEntry | null = replyParentInfo; 993 1035 994 1036 for (let i = 0; i < chunks.length; i++) { 995 1037 const chunk = chunks[i] as string; 996 1038 console.log(`[${twitterUsername}] 📤 Posting chunk ${i + 1}/${chunks.length}...`); 997 1039 updateAppStatus({ message: `Posting chunk ${i + 1}/${chunks.length}...` }); 998 - 1040 + 999 1041 const rt = new RichText({ text: chunk }); 1000 1042 await rt.detectFacets(agent); 1001 1043 const detectedLangs = detectLanguage(chunk); ··· 1048 1090 } catch (err: any) { 1049 1091 retries--; 1050 1092 if (retries === 0) throw err; 1051 - console.warn(`[${twitterUsername}] ⚠️ Post failed (Socket/Network), retrying in 5s... 
(${retries} retries left)`); 1052 - await new Promise(r => setTimeout(r, 5000)); 1093 + console.warn( 1094 + `[${twitterUsername}] ⚠️ Post failed (Socket/Network), retrying in 5s... (${retries} retries left)`, 1095 + ); 1096 + await new Promise((r) => setTimeout(r, 5000)); 1053 1097 } 1054 1098 } 1055 - 1099 + 1056 1100 const currentPostInfo = { 1057 1101 uri: response.uri, 1058 1102 cid: response.cid, ··· 1062 1106 if (i === 0) { 1063 1107 saveProcessedTweet(twitterUsername, bskyIdentifier, tweetId, currentPostInfo); 1064 1108 } 1065 - 1109 + 1066 1110 lastPostInfo = currentPostInfo; 1067 1111 console.log(`[${twitterUsername}] ✅ Chunk ${i + 1} posted successfully.`); 1068 - 1112 + 1069 1113 if (chunks.length > 1) { 1070 1114 await new Promise((r) => setTimeout(r, 3000)); 1071 1115 } ··· 1074 1118 break; 1075 1119 } 1076 1120 } 1077 - 1121 + 1078 1122 const wait = 10000; 1079 1123 console.log(`[${twitterUsername}] 😴 Pacing: Waiting ${wait / 1000}s before next tweet.`); 1080 1124 updateAppStatus({ state: 'pacing', message: `Pacing: Waiting ${wait / 1000}s...` }); ··· 1105 1149 } 1106 1150 } 1107 1151 1108 - 1109 - 1110 - async function importHistory(twitterUsername: string, bskyIdentifier: string, limit = 15, dryRun = false, ignoreCancellation = false): Promise<void> { 1152 + async function importHistory( 1153 + twitterUsername: string, 1154 + bskyIdentifier: string, 1155 + limit = 15, 1156 + dryRun = false, 1157 + ignoreCancellation = false, 1158 + ): Promise<void> { 1111 1159 const config = getConfig(); 1112 - const mapping = config.mappings.find((m) => m.twitterUsernames.map(u => u.toLowerCase()).includes(twitterUsername.toLowerCase())); 1160 + const mapping = config.mappings.find((m) => 1161 + m.twitterUsernames.map((u) => u.toLowerCase()).includes(twitterUsername.toLowerCase()), 1162 + ); 1113 1163 if (!mapping) { 1114 1164 console.error(`No mapping found for twitter username: ${twitterUsername}`); 1115 1165 return; ··· 1129 1179 while (true) { 1130 1180 // 
Check if this backfill request was cancelled (unless ignoring check) 1131 1181 if (!ignoreCancellation) { 1132 - const stillPending = getPendingBackfills().some(b => b.id === mapping.id); 1182 + const stillPending = getPendingBackfills().some((b) => b.id === mapping.id); 1133 1183 if (!stillPending) { 1134 1184 console.log(`[${twitterUsername}] 🛑 Backfill cancelled by user.`); 1135 1185 return; ··· 1149 1199 for (const t of result.tweets) { 1150 1200 const tid = t.id_str || t.id; 1151 1201 if (!tid) continue; 1202 + 1203 + // Validate tweet author 1204 + const authorScreenName = t.user?.screen_name?.toLowerCase(); 1205 + if (authorScreenName && authorScreenName !== twitterUsername.toLowerCase()) { 1206 + console.log(`[${twitterUsername}] ⏩ Skipping tweet ${tid} - author is @${t.user?.screen_name}`); 1207 + continue; 1208 + } 1209 + 1152 1210 if (!processedTweets[tid] && !seenIds.has(tid)) { 1153 1211 allFoundTweets.push(t); 1154 1212 seenIds.add(tid); ··· 1183 1241 const activeTasks = new Map<string, Promise<void>>(); 1184 1242 1185 1243 async function runAccountTask(mapping: AccountMapping, forceBackfill = false, dryRun = false) { 1186 - if (activeTasks.has(mapping.id)) return; // Already running 1244 + if (activeTasks.has(mapping.id)) return; // Already running 1245 + 1246 + const task = (async () => { 1247 + try { 1248 + const agent = await getAgent(mapping); 1249 + if (!agent) return; 1187 1250 1188 - const task = (async () => { 1189 - try { 1190 - const agent = await getAgent(mapping); 1191 - if (!agent) return; 1251 + const backfillReq = getPendingBackfills().find((b) => b.id === mapping.id); 1192 1252 1193 - const backfillReq = getPendingBackfills().find(b => b.id === mapping.id); 1194 - 1195 - if (forceBackfill || backfillReq) { 1196 - const limit = backfillReq?.limit || 15; 1197 - console.log(`[${mapping.bskyIdentifier}] Running backfill for ${mapping.twitterUsernames.length} accounts (limit ${limit})...`); 1198 - 1199 - for (const twitterUsername of 
mapping.twitterUsernames) { 1200 - try { 1201 - updateAppStatus({ state: 'backfilling', currentAccount: twitterUsername, message: `Starting backfill (limit ${limit})...` }); 1202 - await importHistory(twitterUsername, mapping.bskyIdentifier, limit, dryRun); 1203 - } catch (err) { 1204 - console.error(`❌ Error backfilling ${twitterUsername}:`, err); 1205 - } 1206 - } 1207 - clearBackfill(mapping.id); 1208 - console.log(`[${mapping.bskyIdentifier}] Backfill complete.`); 1209 - } else { 1210 - for (const twitterUsername of mapping.twitterUsernames) { 1211 - try { 1212 - updateAppStatus({ state: 'checking', currentAccount: twitterUsername, message: 'Fetching latest tweets...' }); 1213 - const result = await safeSearch(`from:${twitterUsername}`, 30); 1214 - if (!result.success || !result.tweets) continue; 1215 - await processTweets(agent, twitterUsername, mapping.bskyIdentifier, result.tweets, dryRun); 1216 - } catch (err) { 1217 - console.error(`❌ Error checking ${twitterUsername}:`, err); 1218 - } 1219 - } 1220 - } 1221 - } catch (err) { 1222 - console.error(`Error processing mapping ${mapping.bskyIdentifier}:`, err); 1223 - } finally { 1224 - activeTasks.delete(mapping.id); 1253 + if (forceBackfill || backfillReq) { 1254 + const limit = backfillReq?.limit || 15; 1255 + console.log( 1256 + `[${mapping.bskyIdentifier}] Running backfill for ${mapping.twitterUsernames.length} accounts (limit ${limit})...`, 1257 + ); 1258 + 1259 + for (const twitterUsername of mapping.twitterUsernames) { 1260 + try { 1261 + updateAppStatus({ 1262 + state: 'backfilling', 1263 + currentAccount: twitterUsername, 1264 + message: `Starting backfill (limit ${limit})...`, 1265 + }); 1266 + await importHistory(twitterUsername, mapping.bskyIdentifier, limit, dryRun); 1267 + } catch (err) { 1268 + console.error(`❌ Error backfilling ${twitterUsername}:`, err); 1269 + } 1225 1270 } 1226 - })(); 1271 + clearBackfill(mapping.id); 1272 + console.log(`[${mapping.bskyIdentifier}] Backfill complete.`); 
1273 + } else { 1274 + for (const twitterUsername of mapping.twitterUsernames) { 1275 + try { 1276 + updateAppStatus({ 1277 + state: 'checking', 1278 + currentAccount: twitterUsername, 1279 + message: 'Fetching latest tweets...', 1280 + }); 1281 + const result = await safeSearch(`from:${twitterUsername}`, 30); 1282 + if (!result.success || !result.tweets) continue; 1283 + await processTweets(agent, twitterUsername, mapping.bskyIdentifier, result.tweets, dryRun); 1284 + } catch (err) { 1285 + console.error(`❌ Error checking ${twitterUsername}:`, err); 1286 + } 1287 + } 1288 + } 1289 + } catch (err) { 1290 + console.error(`Error processing mapping ${mapping.bskyIdentifier}:`, err); 1291 + } finally { 1292 + activeTasks.delete(mapping.id); 1293 + } 1294 + })(); 1227 1295 1228 - activeTasks.set(mapping.id, task); 1296 + activeTasks.set(mapping.id, task); 1229 1297 } 1230 1298 1299 + import type { AccountMapping } from './config-manager.js'; 1231 1300 import { 1232 - startServer, 1233 - updateLastCheckTime, 1234 - getPendingBackfills, 1235 1301 clearBackfill, 1236 1302 getNextCheckTime, 1303 + getPendingBackfills, 1304 + startServer, 1237 1305 updateAppStatus, 1306 + updateLastCheckTime, 1238 1307 } from './server.js'; 1239 - import { AccountMapping } from './config-manager.js'; 1240 1308 1241 1309 async function main(): Promise<void> { 1242 1310 const program = new Command(); ··· 1277 1345 console.error('Twitter credentials not set. 
Cannot import history.'); 1278 1346 process.exit(1); 1279 1347 } 1280 - const mapping = config.mappings.find(m => m.twitterUsernames.map(u => u.toLowerCase()).includes(options.username.toLowerCase())); 1348 + const mapping = config.mappings.find((m) => 1349 + m.twitterUsernames.map((u) => u.toLowerCase()).includes(options.username.toLowerCase()), 1350 + ); 1281 1351 if (!mapping) { 1282 1352 console.error(`No mapping found for ${options.username}`); 1283 1353 process.exit(1); ··· 1299 1369 const now = Date.now(); 1300 1370 const config = getConfig(); // Reload config to get new mappings/settings 1301 1371 const nextTime = getNextCheckTime(); 1302 - 1372 + 1303 1373 // Check if it's time for a scheduled run OR if we have pending backfills 1304 1374 const isScheduledRun = now >= nextTime; 1305 1375 const pendingBackfills = getPendingBackfills(); 1306 - 1376 + 1307 1377 if (isScheduledRun) { 1308 - console.log(`[${new Date().toISOString()}] ⏰ Scheduled check triggered.`); 1309 - updateLastCheckTime(); 1378 + console.log(`[${new Date().toISOString()}] ⏰ Scheduled check triggered.`); 1379 + updateLastCheckTime(); 1310 1380 } 1311 1381 1312 1382 for (const mapping of config.mappings) { 1313 - if (!mapping.enabled) continue; 1314 - 1315 - const hasPendingBackfill = pendingBackfills.some(b => b.id === mapping.id); 1316 - 1317 - // Run if scheduled OR backfill requested 1318 - if (isScheduledRun || hasPendingBackfill) { 1319 - runAccountTask(mapping, hasPendingBackfill, options.dryRun); 1320 - } 1383 + if (!mapping.enabled) continue; 1384 + 1385 + const hasPendingBackfill = pendingBackfills.some((b) => b.id === mapping.id); 1386 + 1387 + // Run if scheduled OR backfill requested 1388 + if (isScheduledRun || hasPendingBackfill) { 1389 + runAccountTask(mapping, hasPendingBackfill, options.dryRun); 1390 + } 1321 1391 } 1322 - 1392 + 1323 1393 // Sleep for 5 seconds 1324 - await new Promise(resolve => setTimeout(resolve, 5000)); 1394 + await new Promise((resolve) => 
setTimeout(resolve, 5000)); 1325 1395 } 1326 1396 } 1327 1397
+14 -11
src/run-tests.js
··· 21 21 const url1 = 'https://pbs.twimg.com/media/ABC123.jpg'; 22 22 const highQuality1 = url1.includes('?') ? url1.replace('?', ':orig?') : url1 + ':orig'; 23 23 assert(highQuality1 === 'https://pbs.twimg.com/media/ABC123.jpg:orig', 'Should append :orig to plain URLs'); 24 - 24 + 25 25 const url2 = 'https://pbs.twimg.com/media/ABC123.jpg?format=jpg&name=small'; 26 26 const highQuality2 = url2.includes('?') ? url2.replace('?', ':orig?') : url2 + ':orig'; 27 - assert(highQuality2 === 'https://pbs.twimg.com/media/ABC123.jpg:orig?format=jpg&name=small', 'Should replace ? with :orig? for query URLs'); 28 - 27 + assert( 28 + highQuality2 === 'https://pbs.twimg.com/media/ABC123.jpg:orig?format=jpg&name=small', 29 + 'Should replace ? with :orig? for query URLs', 30 + ); 31 + 29 32 const url3 = 'https://pbs.twimg.com/media/DEF456.png?name=large'; 30 33 const highQuality3 = url3.includes('?') ? url3.replace('?', ':orig?') : url3 + ':orig'; 31 34 assert(highQuality3 === 'https://pbs.twimg.com/media/DEF456.png:orig?name=large', 'Should work with PNGs too'); ··· 70 73 const text2 = 'First paragraph.\n\nSecond paragraph.\n\nThird paragraph.'; 71 74 const result2 = splitText(text2, 50); 72 75 assert(result2.length >= 2, `Should split at paragraph breaks (got ${result2.length} chunks)`); 73 - const allHaveContent = result2.every(c => c.length > 0); 76 + const allHaveContent = result2.every((c) => c.length > 0); 74 77 assert(allHaveContent, 'All chunks have content'); 75 78 console.log(); 76 79 } ··· 102 105 const sizes = { 103 106 large: { w: 1200, h: 800 }, 104 107 medium: { w: 600, h: 400 }, 105 - small: { w: 300, h: 200 } 108 + small: { w: 300, h: 200 }, 106 109 }; 107 110 108 111 const getAspectRatio = (mediaSizes, originalInfo) => { ··· 132 135 { content_type: 'video/mp4', url: 'low.mp4', bitrate: 500000 }, 133 136 { content_type: 'video/mp4', url: 'high.mp4', bitrate: 2000000 }, 134 137 { content_type: 'video/mp4', url: 'medium.mp4', bitrate: 1000000 }, 135 - { 
content_type: 'audio/mp4', url: 'audio.mp4', bitrate: 128000 } 138 + { content_type: 'audio/mp4', url: 'audio.mp4', bitrate: 128000 }, 136 139 ]; 137 140 138 141 const mp4s = variants ··· 196 199 const downloadWithRetry = async (url) => { 197 200 callCount++; 198 201 const isHighQuality = url.includes(':orig'); 199 - 202 + 200 203 if (isHighQuality && callCount === 1) { 201 204 throw new Error('High quality download failed'); 202 205 } 203 - 206 + 204 207 if (isHighQuality && callCount === 2) { 205 208 const fallbackUrl = url.replace(':orig?', '?'); 206 209 return { buffer: Buffer.from('standard quality'), mimeType: 'image/jpeg' }; 207 210 } 208 - 211 + 209 212 return { buffer: Buffer.from('standard quality'), mimeType: 'image/jpeg' }; 210 213 }; 211 214 ··· 228 231 } 229 232 }; 230 233 231 - runRetryTests().catch(err => { 234 + runRetryTests().catch((err) => { 232 235 console.log(` ✗ Retry test error: ${err.message}`); 233 236 testsFailed++; 234 237 }); ··· 242 245 jpeg: { quality: 92, mozjpeg: true }, 243 246 jpegFallback: { quality: 85, mozjpeg: true }, 244 247 png: { compressionLevel: 9, adaptiveFiltering: true }, 245 - webp: { quality: 90, effort: 6 } 248 + webp: { quality: 90, effort: 6 }, 246 249 }; 247 250 248 251 assert(settings.jpeg.quality === 92, 'JPEG quality is 92%');
+39 -34
src/server.ts
··· 1 - import fs from 'node:fs'; 2 1 import path from 'node:path'; 3 2 import { fileURLToPath } from 'node:url'; 4 3 import bcrypt from 'bcryptjs'; ··· 35 34 36 35 let currentAppStatus: AppStatus = { 37 36 state: 'idle', 38 - lastUpdate: Date.now() 37 + lastUpdate: Date.now(), 39 38 }; 40 39 41 40 app.use(cors()); ··· 119 118 if (Array.isArray(twitterUsernames)) { 120 119 usernames = twitterUsernames; 121 120 } else if (typeof twitterUsernames === 'string') { 122 - usernames = twitterUsernames.split(',').map(u => u.trim()).filter(u => u.length > 0); 121 + usernames = twitterUsernames 122 + .split(',') 123 + .map((u) => u.trim()) 124 + .filter((u) => u.length > 0); 123 125 } 124 126 125 127 const newMapping = { ··· 141 143 const { id } = req.params; 142 144 const { twitterUsernames, bskyIdentifier, bskyPassword, bskyServiceUrl, owner } = req.body; 143 145 const config = getConfig(); 144 - 146 + 145 147 const index = config.mappings.findIndex((m) => m.id === id); 146 148 const existingMapping = config.mappings[index]; 147 - 149 + 148 150 if (index === -1 || !existingMapping) { 149 151 res.status(404).json({ error: 'Mapping not found' }); 150 152 return; ··· 155 157 if (Array.isArray(twitterUsernames)) { 156 158 usernames = twitterUsernames; 157 159 } else if (typeof twitterUsernames === 'string') { 158 - usernames = twitterUsernames.split(',').map(u => u.trim()).filter(u => u.length > 0); 160 + usernames = twitterUsernames 161 + .split(',') 162 + .map((u) => u.trim()) 163 + .filter((u) => u.length > 0); 159 164 } 160 165 } 161 166 ··· 164 169 twitterUsernames: usernames, 165 170 bskyIdentifier: bskyIdentifier || existingMapping.bskyIdentifier, 166 171 // Only update password if provided 167 - bskyPassword: bskyPassword || existingMapping.bskyPassword, 172 + bskyPassword: bskyPassword || existingMapping.bskyPassword, 168 173 bskyServiceUrl: bskyServiceUrl || existingMapping.bskyServiceUrl, 169 174 owner: owner || existingMapping.owner, 170 175 }; ··· 194 199 for 
(const username of mapping.twitterUsernames) { 195 200 dbService.deleteTweetsByUsername(username); 196 201 } 197 - 202 + 198 203 res.json({ success: true, message: 'Cache cleared for all associated accounts' }); 199 204 }); 200 205 ··· 214 219 }); 215 220 216 221 app.get('/api/ai-config', authenticateToken, requireAdmin, (_req, res) => { 217 - const config = getConfig(); 218 - // Return legacy gemini key as part of new structure if needed 219 - const aiConfig = config.ai || { 220 - provider: 'gemini', 221 - apiKey: config.geminiApiKey || '' 222 - }; 223 - res.json(aiConfig); 222 + const config = getConfig(); 223 + // Return legacy gemini key as part of new structure if needed 224 + const aiConfig = config.ai || { 225 + provider: 'gemini', 226 + apiKey: config.geminiApiKey || '', 227 + }; 228 + res.json(aiConfig); 224 229 }); 225 - 230 + 226 231 app.post('/api/ai-config', authenticateToken, requireAdmin, (req, res) => { 227 - const { provider, apiKey, model, baseUrl } = req.body; 228 - const config = getConfig(); 229 - 230 - config.ai = { 231 - provider, 232 - apiKey, 233 - model: model || undefined, 234 - baseUrl: baseUrl || undefined 235 - }; 236 - 237 - // Clear legacy key to avoid confusion 238 - delete config.geminiApiKey; 239 - 240 - saveConfig(config); 241 - res.json({ success: true }); 232 + const { provider, apiKey, model, baseUrl } = req.body; 233 + const config = getConfig(); 234 + 235 + config.ai = { 236 + provider, 237 + apiKey, 238 + model: model || undefined, 239 + baseUrl: baseUrl || undefined, 240 + }; 241 + 242 + // Clear legacy key to avoid confusion 243 + delete config.geminiApiKey; 244 + 245 + saveConfig(config); 246 + res.json({ success: true }); 242 247 }); 243 248 244 249 // --- Status & Actions Routes --- ··· 276 281 return; 277 282 } 278 283 279 - if (!pendingBackfills.find(b => b.id === id)) { 284 + if (!pendingBackfills.find((b) => b.id === id)) { 280 285 pendingBackfills.push({ id, limit: limit ? 
Number(limit) : undefined }); 281 286 } 282 287 ··· 307 312 currentAppStatus = { 308 313 ...currentAppStatus, 309 314 ...status, 310 - lastUpdate: Date.now() 315 + lastUpdate: Date.now(), 311 316 }; 312 317 } 313 318 ··· 333 338 console.log(`🚀 Web interface running at http://localhost:${PORT}`); 334 339 console.log('📡 Accessible on your local network/Tailscale via your IP.'); 335 340 }); 336 - } 341 + }