A simple tool that lets you scrape Twitter accounts and crosspost them to Bluesky accounts! Comes with a CLI and a webapp for managing profiles! Works with images/videos/link embeds/threads.

feat: support multiple twitter accounts per bsky account with smart cross-threading

jack b1805e00 ec32a3f7

+58 -23
+29 -7
src/db.ts
··· 19 19 // Initialize schema 20 20 db.exec(` 21 21 CREATE TABLE IF NOT EXISTS processed_tweets ( 22 - twitter_id TEXT PRIMARY KEY, 22 + twitter_id TEXT NOT NULL, 23 23 twitter_username TEXT NOT NULL, 24 + bsky_identifier TEXT NOT NULL, 24 25 bsky_uri TEXT, 25 26 bsky_cid TEXT, 26 27 bsky_root_uri TEXT, 27 28 bsky_root_cid TEXT, 28 29 status TEXT NOT NULL, -- 'migrated', 'skipped', 'failed' 29 - created_at DATETIME DEFAULT CURRENT_TIMESTAMP 30 + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, 31 + PRIMARY KEY (twitter_id, bsky_identifier) 30 32 ); 31 33 CREATE INDEX IF NOT EXISTS idx_twitter_username ON processed_tweets(twitter_username); 34 + CREATE INDEX IF NOT EXISTS idx_bsky_identifier ON processed_tweets(bsky_identifier); 32 35 `); 33 36 34 37 export interface ProcessedTweet { 35 38 twitter_id: string; 36 39 twitter_username: string; 40 + bsky_identifier: string; 37 41 bsky_uri?: string; 38 42 bsky_cid?: string; 39 43 bsky_root_uri?: string; ··· 42 46 } 43 47 44 48 export const dbService = { 45 - getTweet(twitterId: string): ProcessedTweet | null { 46 - const stmt = db.prepare('SELECT * FROM processed_tweets WHERE twitter_id = ?'); 47 - const row = stmt.get(twitterId) as any; 49 + getTweet(twitterId: string, bskyIdentifier: string): ProcessedTweet | null { 50 + const stmt = db.prepare('SELECT * FROM processed_tweets WHERE twitter_id = ? AND bsky_identifier = ?'); 51 + const row = stmt.get(twitterId, bskyIdentifier) as any; 48 52 if (!row) return null; 49 53 return { 50 54 twitter_id: row.twitter_id, 51 55 twitter_username: row.twitter_username, 56 + bsky_identifier: row.bsky_identifier, 52 57 bsky_uri: row.bsky_uri, 53 58 bsky_cid: row.bsky_cid, 54 59 bsky_root_uri: row.bsky_root_uri, ··· 60 65 saveTweet(tweet: ProcessedTweet) { 61 66 const stmt = db.prepare(` 62 67 INSERT OR REPLACE INTO processed_tweets 63 - (twitter_id, twitter_username, bsky_uri, bsky_cid, bsky_root_uri, bsky_root_cid, status) 64 - VALUES (?, ?, ?, ?, ?, ?, ?) 
68 + (twitter_id, twitter_username, bsky_identifier, bsky_uri, bsky_cid, bsky_root_uri, bsky_root_cid, status) 69 + VALUES (?, ?, ?, ?, ?, ?, ?, ?) 65 70 `); 66 71 stmt.run( 67 72 tweet.twitter_id, 68 73 tweet.twitter_username, 74 + tweet.bsky_identifier, 69 75 tweet.bsky_uri || null, 70 76 tweet.bsky_cid || null, 71 77 tweet.bsky_root_uri || null, 72 78 tweet.bsky_root_cid || null, 73 79 tweet.status 74 80 ); 81 + }, 82 + 83 + getTweetsByBskyIdentifier(bskyIdentifier: string): Record<string, any> { 84 + const stmt = db.prepare('SELECT * FROM processed_tweets WHERE bsky_identifier = ?'); 85 + const rows = stmt.all(bskyIdentifier.toLowerCase()) as any[]; 86 + const map: Record<string, any> = {}; 87 + for (const row of rows) { 88 + map[row.twitter_id] = { 89 + uri: row.bsky_uri, 90 + cid: row.bsky_cid, 91 + root: row.bsky_root_uri ? { uri: row.bsky_root_uri, cid: row.bsky_root_cid } : undefined, 92 + migrated: row.status === 'migrated', 93 + skipped: row.status === 'skipped' 94 + }; 95 + } 96 + return map; 75 97 }, 76 98 77 99 getTweetsByUsername(username: string): Record<string, any> {
+29 -16
src/index.ts
··· 128 128 if (files.length === 0) return; 129 129 130 130 console.log(`📦 Found ${files.length} legacy cache files. Migrating to SQLite...`); 131 + const config = getConfig(); 131 132 132 133 for (const file of files) { 133 - const username = file.replace('.json', ''); 134 + const username = file.replace('.json', '').toLowerCase(); 135 + // Try to find a matching bskyIdentifier from config 136 + const mapping = config.mappings.find(m => m.twitterUsername.toLowerCase() === username); 137 + const bskyIdentifier = mapping?.bskyIdentifier || 'unknown'; 138 + 134 139 try { 135 140 const filePath = path.join(PROCESSED_DIR, file); 136 141 const data = JSON.parse(fs.readFileSync(filePath, 'utf8')) as ProcessedTweetsMap; ··· 139 144 dbService.saveTweet({ 140 145 twitter_id: twitterId, 141 146 twitter_username: username, 147 + bsky_identifier: bskyIdentifier, 142 148 bsky_uri: entry.uri, 143 149 bsky_cid: entry.cid, 144 150 bsky_root_uri: entry.root?.uri, ··· 157 163 console.log('✅ Migration complete.'); 158 164 } 159 165 160 - function loadProcessedTweets(twitterUsername: string): ProcessedTweetsMap { 161 - return dbService.getTweetsByUsername(twitterUsername); 166 + function loadProcessedTweets(bskyIdentifier: string): ProcessedTweetsMap { 167 + return dbService.getTweetsByBskyIdentifier(bskyIdentifier); 162 168 } 163 169 164 - function saveProcessedTweet(twitterUsername: string, twitterId: string, entry: ProcessedTweetEntry): void { 170 + function saveProcessedTweet(twitterUsername: string, bskyIdentifier: string, twitterId: string, entry: ProcessedTweetEntry): void { 165 171 dbService.saveTweet({ 166 172 twitter_id: twitterId, 167 173 twitter_username: twitterUsername.toLowerCase(), 174 + bsky_identifier: bskyIdentifier.toLowerCase(), 168 175 bsky_uri: entry.uri, 169 176 bsky_cid: entry.cid, 170 177 bsky_root_uri: entry.root?.uri, ··· 618 625 async function processTweets( 619 626 agent: BskyAgent, 620 627 twitterUsername: string, 628 + bskyIdentifier: string, 621 629 
tweets: Tweet[], 622 630 dryRun = false, 623 631 ): Promise<void> { 624 - const processedTweets = loadProcessedTweets(twitterUsername); 632 + const processedTweets = loadProcessedTweets(bskyIdentifier); 625 633 const toProcess = tweets.filter(t => !processedTweets[t.id_str || t.id || '']); 626 634 627 635 if (toProcess.length === 0) { 628 - console.log(`[${twitterUsername}] ✅ No new tweets to process.`); 636 + console.log(`[${twitterUsername}] ✅ No new tweets to process for ${bskyIdentifier}.`); 629 637 return; 630 638 } 631 639 632 - console.log(`[${twitterUsername}] 🚀 Processing ${toProcess.length} new tweets...`); 640 + console.log(`[${twitterUsername}] 🚀 Processing ${toProcess.length} new tweets for ${bskyIdentifier}...`); 633 641 634 642 tweets.reverse(); 635 643 let count = 0; ··· 658 666 659 667 if (isReply) { 660 668 if (replyStatusId && processedTweets[replyStatusId] && !processedTweets[replyStatusId]?.migrated) { 661 - console.log(`[${twitterUsername}] 🧵 Threading reply to local post: ${replyStatusId}`); 669 + console.log(`[${twitterUsername}] 🧵 Threading reply to post in ${bskyIdentifier}: ${replyStatusId}`); 662 670 replyParentInfo = processedTweets[replyStatusId] ?? 
null; 663 671 } else { 664 672 console.log(`[${twitterUsername}] ⏩ Skipping external/unknown reply.`); 665 673 if (!dryRun) { 666 - saveProcessedTweet(twitterUsername, tweetId, { skipped: true }); 674 + saveProcessedTweet(twitterUsername, bskyIdentifier, tweetId, { skipped: true }); 667 675 } 668 676 continue; 669 677 } ··· 880 888 }; 881 889 882 890 if (i === 0) { 883 - saveProcessedTweet(twitterUsername, tweetId, currentPostInfo); 891 + saveProcessedTweet(twitterUsername, bskyIdentifier, tweetId, currentPostInfo); 884 892 } 885 893 886 894 lastPostInfo = currentPostInfo; ··· 946 954 const limit = backfillReq?.limit || 15; 947 955 console.log(`[${mapping.twitterUsername}] Running backfill (limit ${limit})...`); 948 956 updateAppStatus({ state: 'backfilling', currentAccount: mapping.twitterUsername, message: `Starting backfill (limit ${limit})...` }); 949 - await importHistory(mapping.twitterUsername, limit, dryRun); 957 + await importHistory(mapping.twitterUsername, mapping.bskyIdentifier, limit, dryRun); 950 958 clearBackfill(mapping.id); 951 959 console.log(`[${mapping.twitterUsername}] Backfill complete.`); 952 960 } else { 953 961 updateAppStatus({ state: 'checking', currentAccount: mapping.twitterUsername, message: 'Fetching latest tweets...' 
}); 954 962 const result = await safeSearch(`from:${mapping.twitterUsername}`, 30); 955 963 if (!result.success || !result.tweets) continue; 956 - await processTweets(agent, mapping.twitterUsername, result.tweets, dryRun); 964 + await processTweets(agent, mapping.twitterUsername, mapping.bskyIdentifier, result.tweets, dryRun); 957 965 } 958 966 } catch (err) { 959 967 console.error(`Error processing mapping ${mapping.twitterUsername}:`, err); ··· 967 975 } 968 976 } 969 977 970 - async function importHistory(twitterUsername: string, limit = 15, dryRun = false): Promise<void> { 978 + async function importHistory(twitterUsername: string, bskyIdentifier: string, limit = 15, dryRun = false): Promise<void> { 971 979 const config = getConfig(); 972 980 const mapping = config.mappings.find((m) => m.twitterUsername.toLowerCase() === twitterUsername.toLowerCase()); 973 981 if (!mapping) { ··· 984 992 const batchSize = 100; 985 993 const allFoundTweets: Tweet[] = []; 986 994 const seenIds = new Set<string>(); 987 - const processedTweets = loadProcessedTweets(twitterUsername); 995 + const processedTweets = loadProcessedTweets(bskyIdentifier); 988 996 989 997 while (true) { 990 998 // Check if this backfill request was cancelled ··· 1032 1040 1033 1041 console.log(`Fetch complete. Found ${allFoundTweets.length} new tweets to import.`); 1034 1042 if (allFoundTweets.length > 0) { 1035 - await processTweets(agent, twitterUsername, allFoundTweets, dryRun); 1043 + await processTweets(agent, twitterUsername, bskyIdentifier, allFoundTweets, dryRun); 1036 1044 console.log('History import complete.'); 1037 1045 } 1038 1046 } ··· 1083 1091 console.error('Twitter credentials not set. 
Cannot import history.'); 1084 1092 process.exit(1); 1085 1093 } 1086 - await importHistory(options.username, options.limit, options.dryRun); 1094 + const mapping = config.mappings.find(m => m.twitterUsername.toLowerCase() === options.username.toLowerCase()); 1095 + if (!mapping) { 1096 + console.error(`No mapping found for ${options.username}`); 1097 + process.exit(1); 1098 + } 1099 + await importHistory(options.username, mapping.bskyIdentifier, options.limit, options.dryRun); 1087 1100 process.exit(0); 1088 1101 } 1089 1102