A simple tool that lets you scrape Twitter accounts and crosspost their tweets to Bluesky accounts! Comes with a CLI and a web app for managing profiles! Works with images, videos, link embeds, and threads.

fix: improve image compression, handle unconfirmed email, use browser UA for open graph

jack 67ed40fd afeb5689

+116 -45
+53 -45
src/index.ts
··· 353 353 try { 354 354 let image = sharp(buffer); 355 355 const metadata = await image.metadata(); 356 + let currentBuffer = buffer; 357 + let width = metadata.width || 2000; 358 + let quality = 90; 356 359 357 - if (isAnimation) { 358 - console.log(`[UPLOAD] 🖼️ Preserving animation format.`); 359 - if (isWebp && buffer.length > MAX_SIZE) { 360 - image = image.webp({ quality: 90, effort: 6 }); 361 - } 362 - } else { 363 - if (metadata.width && metadata.width > 2000) { 364 - image = image.resize(2000, undefined, { withoutEnlargement: true }); 365 - } 366 - 367 - if (isPng) { 368 - if (metadata.hasAlpha) { 369 - image = image.png({ compressionLevel: 9, adaptiveFiltering: true }); 370 - } else { 371 - image = image.jpeg({ quality: 92, mozjpeg: true }); 372 - finalMimeType = 'image/jpeg'; 373 - } 374 - } else if (isJpeg) { 375 - if (buffer.length > MAX_SIZE) { 376 - image = image.jpeg({ quality: 92, mozjpeg: true }); 377 - } 360 + // Iterative compression loop 361 + let attempts = 0; 362 + while (currentBuffer.length > MAX_SIZE && attempts < 5) { 363 + attempts++; 364 + console.log(`[UPLOAD] 📉 Compression attempt ${attempts}: Width ${width}, Quality ${quality}...`); 365 + 366 + if (isAnimation) { 367 + // For animations (GIF/WebP), we can only do so much without losing frames 368 + // Try to convert to WebP if it's a GIF, or optimize WebP 369 + image = sharp(buffer, { animated: true }); 370 + if (isGif) { 371 + // Convert GIF to WebP for better compression 372 + image = image.webp({ quality: Math.max(quality, 50), effort: 6 }); 373 + finalMimeType = 'image/webp'; 374 + } else { 375 + image = image.webp({ quality: Math.max(quality, 50), effort: 6 }); 376 + } 377 + // Resize if really big 378 + if (metadata.width && metadata.width > 800) { 379 + image = image.resize({ width: 800, withoutEnlargement: true }); 380 + } 378 381 } else { 379 - image = image.jpeg({ quality: 92, mozjpeg: true }); 380 - finalMimeType = 'image/jpeg'; 381 - } 382 - } 383 - 384 - finalBuffer = 
await image.toBuffer(); 385 - console.log(`[UPLOAD] ✅ Optimized to ${(finalBuffer.length / 1024).toFixed(2)} KB`); 386 - 387 - if (finalBuffer.length > MAX_SIZE && !isAnimation) { 388 - console.log(`[UPLOAD] ⚠️ Still large, trying higher compression...`); 389 - const pipeline = sharp(buffer); 390 - const md = await pipeline.metadata(); 391 - 392 - if (md.width && md.width > 1600) { 393 - pipeline.resize(1600, undefined, { withoutEnlargement: true }); 382 + // Static images 383 + if (width > 1600) width = 1600; 384 + else if (attempts > 1) width = Math.floor(width * 0.8); 385 + 386 + quality = Math.max(50, quality - 10); 387 + 388 + image = sharp(buffer) 389 + .resize({ width, withoutEnlargement: true }) 390 + .jpeg({ quality, mozjpeg: true }); 391 + 392 + finalMimeType = 'image/jpeg'; 394 393 } 395 - 396 - if (mimeType === 'image/png' && md.hasAlpha) { 397 - finalBuffer = await pipeline.png({ compressionLevel: 9 }).toBuffer(); 398 - finalMimeType = 'image/png'; 399 - } else { 400 - finalBuffer = await pipeline.jpeg({ quality: 85, mozjpeg: true }).toBuffer(); 401 - finalMimeType = 'image/jpeg'; 394 + 395 + currentBuffer = await image.toBuffer(); 396 + if (currentBuffer.length <= MAX_SIZE) { 397 + finalBuffer = currentBuffer; 398 + console.log(`[UPLOAD] ✅ Optimized to ${(finalBuffer.length / 1024).toFixed(2)} KB`); 399 + break; 402 400 } 403 - console.log(`[UPLOAD] ✅ Further compressed to ${(finalBuffer.length / 1024).toFixed(2)} KB`); 404 401 } 402 + 403 + if (finalBuffer.length > MAX_SIZE) { 404 + console.warn(`[UPLOAD] ⚠️ Could not compress below limit. Current: ${(finalBuffer.length / 1024).toFixed(2)} KB. 
Upload might fail.`); 405 + } 406 + 405 407 } catch (err) { 406 408 console.warn(`[UPLOAD] ⚠️ Optimization failed, attempting original upload:`, (err as Error).message); 407 409 finalBuffer = buffer; ··· 537 539 try { 538 540 const response = await axios.get(url, { 539 541 headers: { 540 - 'User-Agent': 'Mozilla/5.0 (compatible; Tweets2Bsky/1.0; +https://github.com/j4ckxyz/tweets-2-bsky)', 542 + 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', 543 + 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8', 544 + 'Accept-Language': 'en-US,en;q=0.9', 541 545 }, 542 546 timeout: 10000, 543 547 }); ··· 639 643 if (errorJson.error === "already_exists" && errorJson.jobId) { 640 644 console.log(`[VIDEO] ♻️ Video already exists. Resuming with Job ID: ${errorJson.jobId}`); 641 645 return await pollForVideoProcessing(agent, errorJson.jobId); 646 + } 647 + if (errorJson.error === "unconfirmed_email" || (errorJson.jobStatus && errorJson.jobStatus.error === "unconfirmed_email")) { 648 + console.error(`[VIDEO] 🛑 BLUESKY ERROR: Your email is unconfirmed. You MUST verify your email on Bluesky to upload videos.`); 649 + throw new Error("Bluesky Email Unconfirmed - Video Upload Rejected"); 642 650 } 643 651 } catch (e) { 644 652 // Not JSON or missing fields, proceed with throwing
+63
src/test-link.ts
··· 1 + import 'dotenv/config'; 2 + import { TwitterClient } from '@steipete/bird/dist/lib/twitter-client.js'; 3 + 4 + // Provided credentials 5 + const AUTH_TOKEN = '30f5905989c9984bef1ca849910db8c84ae31c04'; 6 + const CT0 = '012bae351eeebe4a19df0a21c73d5705d887246beee09c96da2a5a6935495688d3479b8e3fa963433a43b3b37675c16f4a192d018dd388108a6d6aa3ed3d9ba3233238bc71830583a1613ec042d3ba55'; 7 + 8 + async function main() { 9 + console.log('🧪 Starting Link Extraction Test...'); 10 + 11 + const client = new TwitterClient({ 12 + cookies: { 13 + authToken: AUTH_TOKEN, 14 + ct0: CT0, 15 + }, 16 + }); 17 + 18 + const username = 'NVIDIANetworkng'; 19 + // Searching for the specific tweet ID: 2003547578848206861 20 + // Note: search usually doesn't find by ID directly unless we use specific operators or search from user 21 + // Let's try searching from user and look for the ID. 22 + 23 + console.log(`🔍 Fetching tweets for @${username}...`); 24 + 25 + try { 26 + const result = (await client.search(`from:${username}`, 20)) as any; 27 + 28 + if (!result.success || !result.tweets) { 29 + console.error('❌ Failed to fetch tweets:', result.error); 30 + return; 31 + } 32 + 33 + const targetId = '2003547578848206861'; 34 + const targetTweet = result.tweets.find((t: any) => (t.id_str || t.id) === targetId); 35 + 36 + if (targetTweet) { 37 + console.log('✅ Found target tweet!'); 38 + console.log('--- RAW TWEET OBJECT ---'); 39 + console.log(JSON.stringify(targetTweet, null, 2)); 40 + console.log('------------------------'); 41 + 42 + console.log('--- Entities ---'); 43 + console.log(JSON.stringify(targetTweet.entities, null, 2)); 44 + 45 + if (targetTweet.card) { 46 + console.log('--- Card Data (Bird internal?) 
---'); 47 + console.log(JSON.stringify(targetTweet.card, null, 2)); 48 + } 49 + } else { 50 + console.log(`❌ Target tweet ${targetId} not found in last 20 results.`); 51 + // Just print the first one to see structure anyway 52 + if (result.tweets.length > 0) { 53 + console.log('Printing first tweet for structure analysis:'); 54 + console.log(JSON.stringify(result.tweets[0], null, 2)); 55 + } 56 + } 57 + 58 + } catch (err) { 59 + console.error('❌ Error:', err); 60 + } 61 + } 62 + 63 + main();