
Investigation/warning about inbound migration

lewis fc98610d 7cbcc189

Changed files
+807 -371
+9
KNOWN_ISSUES.md
··· 1 + # Known Issues 2 + 3 + ## Account migration from bsky.social 4 + 5 + Migrating your account from bsky.social to this PDS works, but Bluesky's appview may not recognize your new signing key. You can still post and your followers will see it, but some authenticated requests may fail with "jwt signature does not match jwt issuer". 6 + 7 + We've gone to some lengths to verify that our side is correct (the PLC directory is updated, the signing keys match, and relays have the account), but whatever events we're emitting aren't triggering Bluesky's appview to refresh its identity data. Still investigating. 8 + 9 + No workaround yet.
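A quick way to rule out the PLC side while investigating: fetch the DID document straight from plc.directory and compare the signing key and PDS endpoint against what the new PDS expects. A minimal TypeScript sketch, assuming only the public plc.directory endpoint; the DID is a placeholder.

```typescript
// Minimal check that plc.directory already reflects the post-migration state.
// The DID below is a placeholder; substitute the migrated account's did:plc.
async function checkPlcDocument(did: string): Promise<void> {
  const res = await fetch(`https://plc.directory/${did}`);
  if (!res.ok) {
    throw new Error(`plc.directory returned ${res.status} for ${did}`);
  }
  const doc = await res.json();
  // verificationMethod carries the atproto signing key; service carries the PDS endpoint.
  console.log("verificationMethod:", JSON.stringify(doc.verificationMethod, null, 2));
  console.log("service:", JSON.stringify(doc.service, null, 2));
}

checkPlcDocument("did:plc:example").catch(console.error);
```

If the new key and endpoint are already there, the remaining question is why downstream services haven't picked them up, which matches the symptom described above.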
+90 -14
frontend/src/lib/migration/atproto-client.ts
··· 11 11 Session, 12 12 } from "./types"; 13 13 14 + function apiLog( 15 + method: string, 16 + endpoint: string, 17 + data?: Record<string, unknown>, 18 + ) { 19 + const timestamp = new Date().toISOString(); 20 + const msg = `[API ${timestamp}] ${method} ${endpoint}`; 21 + if (data) { 22 + console.log(msg, JSON.stringify(data, null, 2)); 23 + } else { 24 + console.log(msg); 25 + } 26 + } 27 + 14 28 export class AtprotoClient { 15 29 private baseUrl: string; 16 30 private accessToken: string | null = null; ··· 107 121 body.authFactorToken = authFactorToken; 108 122 } 109 123 110 - const session = await this.xrpc<Session>("com.atproto.server.createSession", { 111 - httpMethod: "POST", 112 - body, 113 - }); 124 + const session = await this.xrpc<Session>( 125 + "com.atproto.server.createSession", 126 + { 127 + httpMethod: "POST", 128 + body, 129 + }, 130 + ); 114 131 115 132 this.accessToken = session.accessJwt; 116 133 return session; ··· 239 256 async listMissingBlobs( 240 257 cursor?: string, 241 258 limit = 100, 242 - ): Promise<{ blobs: Array<{ cid: string; recordUri: string }>; cursor?: string }> { 259 + ): Promise< 260 + { blobs: Array<{ cid: string; recordUri: string }>; cursor?: string } 261 + > { 243 262 const params: Record<string, string> = { limit: String(limit) }; 244 263 if (cursor) { 245 264 params.cursor = cursor; ··· 267 286 } 268 287 269 288 async submitPlcOperation(operation: PlcOperation): Promise<void> { 289 + apiLog( 290 + "POST", 291 + `${this.baseUrl}/xrpc/com.atproto.identity.submitPlcOperation`, 292 + { 293 + operationType: operation.type, 294 + operationPrev: operation.prev, 295 + }, 296 + ); 297 + const start = Date.now(); 270 298 await this.xrpc("com.atproto.identity.submitPlcOperation", { 271 299 httpMethod: "POST", 272 300 body: { operation }, 273 301 }); 302 + apiLog( 303 + "POST", 304 + `${this.baseUrl}/xrpc/com.atproto.identity.submitPlcOperation COMPLETE`, 305 + { 306 + durationMs: Date.now() - start, 307 + }, 308 + ); 274 309 } 275 310 276 311 async getRecommendedDidCredentials(): Promise<DidCredentials> { ··· 278 313 } 279 314 280 315 async activateAccount(): Promise<void> { 316 + apiLog("POST", `${this.baseUrl}/xrpc/com.atproto.server.activateAccount`); 317 + const start = Date.now(); 281 318 await this.xrpc("com.atproto.server.activateAccount", { 282 319 httpMethod: "POST", 283 320 }); 321 + apiLog( 322 + "POST", 323 + `${this.baseUrl}/xrpc/com.atproto.server.activateAccount COMPLETE`, 324 + { 325 + durationMs: Date.now() - start, 326 + }, 327 + ); 284 328 } 285 329 286 330 async deactivateAccount(): Promise<void> { 287 - await this.xrpc("com.atproto.server.deactivateAccount", { 288 - httpMethod: "POST", 289 - }); 331 + apiLog("POST", `${this.baseUrl}/xrpc/com.atproto.server.deactivateAccount`); 332 + const start = Date.now(); 333 + try { 334 + await this.xrpc("com.atproto.server.deactivateAccount", { 335 + httpMethod: "POST", 336 + }); 337 + apiLog( 338 + "POST", 339 + `${this.baseUrl}/xrpc/com.atproto.server.deactivateAccount COMPLETE`, 340 + { 341 + durationMs: Date.now() - start, 342 + success: true, 343 + }, 344 + ); 345 + } catch (e) { 346 + const err = e as Error & { error?: string; status?: number }; 347 + apiLog( 348 + "POST", 349 + `${this.baseUrl}/xrpc/com.atproto.server.deactivateAccount FAILED`, 350 + { 351 + durationMs: Date.now() - start, 352 + error: err.message, 353 + errorCode: err.error, 354 + status: err.status, 355 + }, 356 + ); 357 + throw e; 358 + } 290 359 } 291 360 292 361 async checkAccountStatus(): Promise<AccountStatus> { 
··· 330 399 identifier: string, 331 400 password: string, 332 401 ): Promise<Session> { 333 - const session = await this.xrpc<Session>("com.atproto.server.createSession", { 334 - httpMethod: "POST", 335 - body: { identifier, password, allowDeactivated: true }, 336 - }); 402 + const session = await this.xrpc<Session>( 403 + "com.atproto.server.createSession", 404 + { 405 + httpMethod: "POST", 406 + body: { identifier, password, allowDeactivated: true }, 407 + }, 408 + ); 337 409 this.accessToken = session.accessJwt; 338 410 return session; 339 411 } ··· 341 413 async verifyToken( 342 414 token: string, 343 415 identifier: string, 344 - ): Promise<{ success: boolean; did: string; purpose: string; channel: string }> { 416 + ): Promise< 417 + { success: boolean; did: string; purpose: string; channel: string } 418 + > { 345 419 return this.xrpc("com.tranquil.account.verifyToken", { 346 420 httpMethod: "POST", 347 421 body: { token, identifier }, ··· 392 466 393 467 if (handle.endsWith(".bsky.social")) { 394 468 const res = await fetch( 395 - `https://public.api.bsky.app/xrpc/com.atproto.identity.resolveHandle?handle=${encodeURIComponent(handle)}`, 469 + `https://public.api.bsky.app/xrpc/com.atproto.identity.resolveHandle?handle=${ 470 + encodeURIComponent(handle) 471 + }`, 396 472 ); 397 473 if (!res.ok) { 398 474 throw new Error(`Failed to resolve handle: ${res.statusText}`);
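The START/COMPLETE/FAILED timing pattern wrapped around submitPlcOperation, activateAccount, and deactivateAccount above repeats the same boilerplate three times. A hypothetical helper (not part of this commit) that factors it out, reusing the apiLog function added in this hunk:

```typescript
// Hypothetical helper (not in this commit) that wraps a call with the same
// apiLog START/COMPLETE/FAILED pattern used around the migration endpoints.
async function withApiLog<T>(
  method: string,
  endpoint: string,
  fn: () => Promise<T>,
): Promise<T> {
  apiLog(method, endpoint);
  const start = Date.now();
  try {
    const result = await fn();
    apiLog(method, `${endpoint} COMPLETE`, { durationMs: Date.now() - start });
    return result;
  } catch (e) {
    const err = e as Error & { error?: string; status?: number };
    apiLog(method, `${endpoint} FAILED`, {
      durationMs: Date.now() - start,
      error: err.message,
      errorCode: err.error,
      status: err.status,
    });
    throw e;
  }
}
```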
+224 -29
frontend/src/lib/migration/flow.svelte.ts
··· 20 20 updateStep, 21 21 } from "./storage"; 22 22 23 + function migrationLog(stage: string, data?: Record<string, unknown>) { 24 + const timestamp = new Date().toISOString(); 25 + const msg = `[MIGRATION ${timestamp}] ${stage}`; 26 + if (data) { 27 + console.log(msg, JSON.stringify(data, null, 2)); 28 + } else { 29 + console.log(msg); 30 + } 31 + } 32 + 23 33 function createInitialProgress(): MigrationProgress { 24 34 return { 25 35 repoExported: false, ··· 105 115 password: string, 106 116 twoFactorCode?: string, 107 117 ): Promise<void> { 118 + migrationLog("loginToSource START", { handle, has2FA: !!twoFactorCode }); 119 + 108 120 if (!state.sourcePdsUrl) { 109 121 await resolveSourcePds(handle); 110 122 } ··· 114 126 } 115 127 116 128 try { 129 + migrationLog("loginToSource: Calling createSession on OLD PDS", { 130 + pdsUrl: state.sourcePdsUrl, 131 + }); 117 132 const session = await sourceClient.login(handle, password, twoFactorCode); 133 + migrationLog("loginToSource SUCCESS", { 134 + did: session.did, 135 + handle: session.handle, 136 + pdsUrl: state.sourcePdsUrl, 137 + }); 118 138 state.sourceAccessToken = session.accessJwt; 119 139 state.sourceRefreshToken = session.refreshJwt; 120 140 state.sourceDid = session.did; ··· 123 143 saveMigrationState(state); 124 144 } catch (e) { 125 145 const err = e as Error & { error?: string }; 146 + migrationLog("loginToSource FAILED", { 147 + error: err.message, 148 + errorCode: err.error, 149 + }); 126 150 if (err.error === "AuthFactorTokenRequired") { 127 151 state.requires2FA = true; 128 - throw new Error("Two-factor authentication required. Please enter the code sent to your email."); 152 + throw new Error( 153 + "Two-factor authentication required. Please enter the code sent to your email.", 154 + ); 129 155 } 130 156 throw e; 131 157 } ··· 143 169 } 144 170 } 145 171 146 - async function authenticateToLocal(email: string, password: string): Promise<void> { 172 + async function authenticateToLocal( 173 + email: string, 174 + password: string, 175 + ): Promise<void> { 147 176 if (!localClient) { 148 177 localClient = createLocalClient(); 149 178 } ··· 151 180 } 152 181 153 182 async function startMigration(): Promise<void> { 183 + migrationLog("startMigration START", { 184 + sourceDid: state.sourceDid, 185 + sourceHandle: state.sourceHandle, 186 + targetHandle: state.targetHandle, 187 + sourcePdsUrl: state.sourcePdsUrl, 188 + }); 189 + 154 190 if (!sourceClient || !state.sourceAccessToken) { 191 + migrationLog("startMigration ERROR: Not logged in to source PDS"); 155 192 throw new Error("Not logged in to source PDS"); 156 193 } 157 194 ··· 163 200 setProgress({ currentOperation: "Getting service auth token..." }); 164 201 165 202 try { 203 + migrationLog("startMigration: Loading local server info"); 166 204 const serverInfo = await loadLocalServerInfo(); 205 + migrationLog("startMigration: Got server info", { 206 + serverDid: serverInfo.did, 207 + }); 208 + 209 + migrationLog("startMigration: Getting service auth token from OLD PDS"); 167 210 const { token } = await sourceClient.getServiceAuth( 168 211 serverInfo.did, 169 212 "com.atproto.server.createAccount", 170 213 ); 214 + migrationLog("startMigration: Got service auth token"); 171 215 state.serviceAuthToken = token; 172 216 173 217 setProgress({ currentOperation: "Creating account on new PDS..." 
}); ··· 180 224 inviteCode: state.inviteCode || undefined, 181 225 }; 182 226 227 + migrationLog("startMigration: Creating account on NEW PDS", { 228 + did: accountParams.did, 229 + handle: accountParams.handle, 230 + }); 183 231 const session = await localClient.createAccount(accountParams, token); 232 + migrationLog("startMigration: Account created on NEW PDS", { 233 + did: session.did, 234 + }); 184 235 localClient.setAccessToken(session.accessJwt); 185 236 186 237 setProgress({ currentOperation: "Exporting repository..." }); 187 - 238 + migrationLog("startMigration: Exporting repo from OLD PDS"); 239 + const exportStart = Date.now(); 188 240 const car = await sourceClient.getRepo(state.sourceDid); 189 - setProgress({ repoExported: true, currentOperation: "Importing repository..." }); 241 + migrationLog("startMigration: Repo exported", { 242 + durationMs: Date.now() - exportStart, 243 + sizeBytes: car.byteLength, 244 + }); 245 + setProgress({ 246 + repoExported: true, 247 + currentOperation: "Importing repository...", 248 + }); 190 249 250 + migrationLog("startMigration: Importing repo to NEW PDS"); 251 + const importStart = Date.now(); 191 252 await localClient.importRepo(car); 192 - setProgress({ repoImported: true, currentOperation: "Counting blobs..." }); 253 + migrationLog("startMigration: Repo imported", { 254 + durationMs: Date.now() - importStart, 255 + }); 256 + setProgress({ 257 + repoImported: true, 258 + currentOperation: "Counting blobs...", 259 + }); 193 260 194 261 const accountStatus = await localClient.checkAccountStatus(); 262 + migrationLog("startMigration: Account status", { 263 + expectedBlobs: accountStatus.expectedBlobs, 264 + importedBlobs: accountStatus.importedBlobs, 265 + }); 195 266 setProgress({ 196 267 blobsTotal: accountStatus.expectedBlobs, 197 268 currentOperation: "Migrating blobs...", ··· 202 273 setProgress({ currentOperation: "Migrating preferences..." 
}); 203 274 await migratePreferences(); 204 275 276 + migrationLog( 277 + "startMigration: Initial migration complete, waiting for email verification", 278 + ); 205 279 setStep("email-verify"); 206 280 } catch (e) { 207 281 const err = e as Error & { error?: string; status?: number }; 208 - const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`; 282 + const message = err.message || err.error || 283 + `Unknown error (status ${err.status || "unknown"})`; 284 + migrationLog("startMigration FAILED", { 285 + error: message, 286 + errorCode: err.error, 287 + status: err.status, 288 + stack: err.stack, 289 + }); 209 290 setError(message); 210 291 setStep("error"); 211 292 } ··· 226 307 for (const blob of blobs) { 227 308 try { 228 309 setProgress({ 229 - currentOperation: `Migrating blob ${migrated + 1}/${state.progress.blobsTotal}...`, 310 + currentOperation: `Migrating blob ${ 311 + migrated + 1 312 + }/${state.progress.blobsTotal}...`, 230 313 }); 231 314 232 - const blobData = await sourceClient.getBlob(state.sourceDid, blob.cid); 315 + const blobData = await sourceClient.getBlob( 316 + state.sourceDid, 317 + blob.cid, 318 + ); 233 319 await localClient.uploadBlob(blobData, "application/octet-stream"); 234 320 migrated++; 235 321 setProgress({ blobsMigrated: migrated }); ··· 253 339 } 254 340 } 255 341 256 - async function submitEmailVerifyToken(token: string, localPassword?: string): Promise<void> { 342 + async function submitEmailVerifyToken( 343 + token: string, 344 + localPassword?: string, 345 + ): Promise<void> { 257 346 if (!localClient) { 258 347 localClient = createLocalClient(); 259 348 } ··· 266 355 267 356 if (!sourceClient) { 268 357 setStep("source-login"); 269 - setError("Email verified! Please log in to your old account again to complete the migration."); 358 + setError( 359 + "Email verified! Please log in to your old account again to complete the migration.", 360 + ); 270 361 return; 271 362 } 272 363 ··· 285 376 setStep("plc-token"); 286 377 } catch (e) { 287 378 const err = e as Error & { error?: string; status?: number }; 288 - const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`; 379 + const message = err.message || err.error || 380 + `Unknown error (status ${err.status || "unknown"})`; 289 381 setError(message); 290 382 } 291 383 } ··· 305 397 306 398 checkingEmailVerification = true; 307 399 try { 308 - await localClient.loginDeactivated(state.targetEmail, state.targetPassword); 400 + await localClient.loginDeactivated( 401 + state.targetEmail, 402 + state.targetPassword, 403 + ); 309 404 await sourceClient.requestPlcOperationSignature(); 310 405 setStep("plc-token"); 311 406 return true; ··· 321 416 } 322 417 323 418 async function submitPlcToken(token: string): Promise<void> { 419 + migrationLog("submitPlcToken START", { 420 + sourceDid: state.sourceDid, 421 + sourceHandle: state.sourceHandle, 422 + targetHandle: state.targetHandle, 423 + sourcePdsUrl: state.sourcePdsUrl, 424 + }); 425 + 324 426 if (!sourceClient || !localClient) { 427 + migrationLog("submitPlcToken ERROR: Not connected to PDSes", { 428 + hasSourceClient: !!sourceClient, 429 + hasLocalClient: !!localClient, 430 + }); 325 431 throw new Error("Not connected to PDSes"); 326 432 } 327 433 ··· 330 436 setProgress({ currentOperation: "Signing PLC operation..." 
}); 331 437 332 438 try { 439 + migrationLog("Step 1: Getting recommended DID credentials from NEW PDS"); 333 440 const credentials = await localClient.getRecommendedDidCredentials(); 441 + migrationLog("Step 1 COMPLETE: Got credentials", { 442 + rotationKeys: credentials.rotationKeys, 443 + alsoKnownAs: credentials.alsoKnownAs, 444 + verificationMethods: credentials.verificationMethods, 445 + services: credentials.services, 446 + }); 334 447 448 + migrationLog("Step 2: Signing PLC operation on OLD PDS", { 449 + sourcePdsUrl: state.sourcePdsUrl, 450 + }); 451 + const signStart = Date.now(); 335 452 const { operation } = await sourceClient.signPlcOperation({ 336 453 token, 337 454 ...credentials, 338 455 }); 456 + migrationLog("Step 2 COMPLETE: PLC operation signed", { 457 + durationMs: Date.now() - signStart, 458 + operationType: operation.type, 459 + operationPrev: operation.prev, 460 + }); 339 461 340 - setProgress({ plcSigned: true, currentOperation: "Submitting PLC operation..." }); 462 + setProgress({ 463 + plcSigned: true, 464 + currentOperation: "Submitting PLC operation...", 465 + }); 466 + migrationLog("Step 3: Submitting PLC operation to NEW PDS"); 467 + const submitStart = Date.now(); 341 468 await localClient.submitPlcOperation(operation); 469 + migrationLog("Step 3 COMPLETE: PLC operation submitted", { 470 + durationMs: Date.now() - submitStart, 471 + }); 342 472 343 - setProgress({ currentOperation: "Activating account (waiting for DID propagation)..." }); 473 + setProgress({ 474 + currentOperation: "Activating account (waiting for DID propagation)...", 475 + }); 476 + migrationLog("Step 4: Activating account on NEW PDS"); 477 + const activateStart = Date.now(); 344 478 await localClient.activateAccount(); 479 + migrationLog("Step 4 COMPLETE: Account activated on NEW PDS", { 480 + durationMs: Date.now() - activateStart, 481 + }); 345 482 setProgress({ activated: true }); 346 483 347 484 setProgress({ currentOperation: "Deactivating old account..." 
}); 485 + migrationLog("Step 5: Deactivating account on OLD PDS", { 486 + sourcePdsUrl: state.sourcePdsUrl, 487 + }); 488 + const deactivateStart = Date.now(); 348 489 try { 349 490 await sourceClient.deactivateAccount(); 491 + migrationLog("Step 5 COMPLETE: Account deactivated on OLD PDS", { 492 + durationMs: Date.now() - deactivateStart, 493 + success: true, 494 + }); 350 495 setProgress({ deactivated: true }); 351 - } catch { 496 + } catch (deactivateErr) { 497 + const err = deactivateErr as Error & { 498 + error?: string; 499 + status?: number; 500 + }; 501 + migrationLog("Step 5 FAILED: Could not deactivate on OLD PDS", { 502 + durationMs: Date.now() - deactivateStart, 503 + error: err.message, 504 + errorCode: err.error, 505 + status: err.status, 506 + }); 352 507 } 353 508 509 + migrationLog("submitPlcToken SUCCESS: Migration complete", { 510 + sourceDid: state.sourceDid, 511 + newHandle: state.targetHandle, 512 + }); 354 513 setStep("success"); 355 514 clearMigrationState(); 356 515 } catch (e) { 357 516 const err = e as Error & { error?: string; status?: number }; 358 - const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`; 517 + const message = err.message || err.error || 518 + `Unknown error (status ${err.status || "unknown"})`; 519 + migrationLog("submitPlcToken FAILED", { 520 + error: message, 521 + errorCode: err.error, 522 + status: err.status, 523 + stack: err.stack, 524 + }); 359 525 state.step = "plc-token"; 360 526 state.error = message; 361 527 saveMigrationState(state); ··· 418 584 state.step = "source-login"; 419 585 } 420 586 421 - function getLocalSession(): { accessJwt: string; did: string; handle: string } | null { 587 + function getLocalSession(): 588 + | { accessJwt: string; did: string; handle: string } 589 + | null { 422 590 if (!localClient) return null; 423 591 const token = localClient.getAccessToken(); 424 592 if (!token) return null; ··· 430 598 } 431 599 432 600 return { 433 - get state() { return state; }, 601 + get state() { 602 + return state; 603 + }, 434 604 setStep, 435 605 setError, 436 606 loadLocalServerInfo, ··· 513 683 } 514 684 } 515 685 516 - function initLocalClient(accessToken: string, did?: string, handle?: string): void { 686 + function initLocalClient( 687 + accessToken: string, 688 + did?: string, 689 + handle?: string, 690 + ): void { 517 691 localClient = createLocalClient(); 518 692 localClient.setAccessToken(accessToken); 519 693 if (did) { ··· 557 731 setProgress({ currentOperation: "Exporting repository..." }); 558 732 559 733 const car = await localClient.getRepo(currentDid); 560 - setProgress({ repoExported: true, currentOperation: "Importing repository..." }); 734 + setProgress({ 735 + repoExported: true, 736 + currentOperation: "Importing repository...", 737 + }); 561 738 562 739 await targetClient.importRepo(car); 563 - setProgress({ repoImported: true, currentOperation: "Counting blobs..." 
}); 740 + setProgress({ 741 + repoImported: true, 742 + currentOperation: "Counting blobs...", 743 + }); 564 744 565 745 const accountStatus = await targetClient.checkAccountStatus(); 566 746 setProgress({ ··· 579 759 setStep("plc-token"); 580 760 } catch (e) { 581 761 const err = e as Error & { error?: string; status?: number }; 582 - const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`; 762 + const message = err.message || err.error || 763 + `Unknown error (status ${err.status || "unknown"})`; 583 764 setError(message); 584 765 setStep("error"); 585 766 } ··· 600 781 for (const blob of blobs) { 601 782 try { 602 783 setProgress({ 603 - currentOperation: `Migrating blob ${migrated + 1}/${state.progress.blobsTotal}...`, 784 + currentOperation: `Migrating blob ${ 785 + migrated + 1 786 + }/${state.progress.blobsTotal}...`, 604 787 }); 605 788 606 789 const blobData = await localClient.getBlob(did, blob.cid); ··· 644 827 ...credentials, 645 828 }); 646 829 647 - setProgress({ plcSigned: true, currentOperation: "Submitting PLC operation..." }); 830 + setProgress({ 831 + plcSigned: true, 832 + currentOperation: "Submitting PLC operation...", 833 + }); 648 834 649 835 await targetClient.submitPlcOperation(operation); 650 836 ··· 660 846 } 661 847 662 848 if (state.localDid.startsWith("did:web:")) { 663 - setProgress({ currentOperation: "Updating DID document forwarding..." }); 849 + setProgress({ 850 + currentOperation: "Updating DID document forwarding...", 851 + }); 664 852 try { 665 853 await localClient.updateMigrationForwarding(state.targetPdsUrl); 666 854 } catch (e) { ··· 672 860 clearMigrationState(); 673 861 } catch (e) { 674 862 const err = e as Error & { error?: string; status?: number }; 675 - const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`; 863 + const message = err.message || err.error || 864 + `Unknown error (status ${err.status || "unknown"})`; 676 865 setError(message); 677 866 setStep("plc-token"); 678 867 } ··· 711 900 } 712 901 713 902 return { 714 - get state() { return state; }, 903 + get state() { 904 + return state; 905 + }, 715 906 setStep, 716 907 setError, 717 908 validateTargetPds, ··· 730 921 }; 731 922 } 732 923 733 - export type InboundMigrationFlow = ReturnType<typeof createInboundMigrationFlow>; 734 - export type OutboundMigrationFlow = ReturnType<typeof createOutboundMigrationFlow>; 924 + export type InboundMigrationFlow = ReturnType< 925 + typeof createInboundMigrationFlow 926 + >; 927 + export type OutboundMigrationFlow = ReturnType< 928 + typeof createOutboundMigrationFlow 929 + >;
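Condensing the inbound flow from this file, the happy path is the call sequence below. This is a sketch, not the real flow.svelte.ts: it assumes two AtprotoClient instances (oldPds for the source, newPds for this server) and the methods used in this commit, and it skips 2FA, blob migration, preferences, email verification, and error handling; serverDid and plcToken stand in for values the real flow gets from loadLocalServerInfo() and the emailed PLC token.

```typescript
// Condensed sketch of the inbound migration happy path.
async function migrateInbound(opts: {
  oldPds: AtprotoClient;
  newPds: AtprotoClient;
  serverDid: string;
  handle: string;
  password: string;
  newHandle: string;
  email: string;
  newPassword: string;
  plcToken: string;
}): Promise<void> {
  const { oldPds, newPds } = opts;

  // 1. Log in to the old PDS and get a service auth token scoped to createAccount.
  const session = await oldPds.login(opts.handle, opts.password);
  const { token } = await oldPds.getServiceAuth(
    opts.serverDid,
    "com.atproto.server.createAccount",
  );

  // 2. Create the (deactivated) account on the new PDS, keeping the same DID.
  const newSession = await newPds.createAccount(
    { did: session.did, handle: opts.newHandle, email: opts.email, password: opts.newPassword },
    token,
  );
  newPds.setAccessToken(newSession.accessJwt);

  // 3. Move the repo.
  const car = await oldPds.getRepo(session.did);
  await newPds.importRepo(car);

  // 4. Re-key the identity via PLC, then flip activation.
  const credentials = await newPds.getRecommendedDidCredentials();
  const { operation } = await oldPds.signPlcOperation({ token: opts.plcToken, ...credentials });
  await newPds.submitPlcOperation(operation);
  await newPds.activateAccount();
  await oldPds.deactivateAccount(); // best-effort in the real flow
}
```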
+11 -3
frontend/src/lib/migration/storage.ts
··· 1 - import type { MigrationDirection, MigrationState, StoredMigrationState } from "./types"; 1 + import type { 2 + MigrationDirection, 3 + MigrationState, 4 + StoredMigrationState, 5 + } from "./types"; 2 6 3 7 const STORAGE_KEY = "tranquil_migration_state"; 4 8 const MAX_AGE_MS = 24 * 60 * 60 * 1000; ··· 9 13 direction: state.direction, 10 14 step: state.direction === "inbound" ? state.step : state.step, 11 15 startedAt: new Date().toISOString(), 12 - sourcePdsUrl: state.direction === "inbound" ? state.sourcePdsUrl : window.location.origin, 13 - targetPdsUrl: state.direction === "inbound" ? window.location.origin : state.targetPdsUrl, 16 + sourcePdsUrl: state.direction === "inbound" 17 + ? state.sourcePdsUrl 18 + : window.location.origin, 19 + targetPdsUrl: state.direction === "inbound" 20 + ? window.location.origin 21 + : state.targetPdsUrl, 14 22 sourceDid: state.direction === "inbound" ? state.sourceDid : "", 15 23 sourceHandle: state.direction === "inbound" ? state.sourceHandle : "", 16 24 targetHandle: state.targetHandle,
+4 -5
src/api/delegation.rs
··· 468 468 auth: BearerAuth, 469 469 Query(params): Query<AuditLogParams>, 470 470 ) -> Response { 471 - let limit = params.limit.min(100).max(1); 471 + let limit = params.limit.clamp(1, 100); 472 472 let offset = params.offset.max(0); 473 473 474 474 let entries = ··· 489 489 } 490 490 }; 491 491 492 - let total = match delegation::audit::count_audit_log_entries(&state.db, &auth.0.did).await { 493 - Ok(t) => t, 494 - Err(_) => 0, 495 - }; 492 + let total = delegation::audit::count_audit_log_entries(&state.db, &auth.0.did) 493 + .await 494 + .unwrap_or_default(); 496 495 497 496 Json(GetAuditLogResponse { 498 497 entries: entries
+71 -27
src/api/identity/account.rs
··· 69 69 headers: HeaderMap, 70 70 Json(input): Json<CreateAccountInput>, 71 71 ) -> Response { 72 - info!("create_account called"); 72 + let is_potential_migration = input 73 + .did 74 + .as_ref() 75 + .map(|d| d.starts_with("did:plc:")) 76 + .unwrap_or(false); 77 + if is_potential_migration { 78 + info!( 79 + "[MIGRATION] createAccount called for potential migration did={:?} handle={}", 80 + input.did, input.handle 81 + ); 82 + } else { 83 + info!("create_account called"); 84 + } 73 85 let client_ip = extract_client_ip(&headers); 74 86 if !state 75 87 .check_rate_limit(RateLimitKind::AccountCreation, &client_ip) ··· 136 148 && let (Some(provided_did), Some(auth_did)) = (input.did.as_ref(), migration_auth.as_ref()) 137 149 { 138 150 if provided_did != auth_did { 151 + info!( 152 + "[MIGRATION] createAccount: Service token mismatch - token_did={} provided_did={}", 153 + auth_did, provided_did 154 + ); 139 155 return ( 140 156 StatusCode::FORBIDDEN, 141 157 Json(json!({ ··· 148 164 if is_did_web_byod { 149 165 info!(did = %provided_did, "Processing did:web BYOD account creation"); 150 166 } else { 151 - info!(did = %provided_did, "Processing account migration"); 167 + info!( 168 + "[MIGRATION] createAccount: Service token verified, processing migration for did={}", 169 + provided_did 170 + ); 152 171 } 153 172 } 154 173 ··· 1005 1024 } 1006 1025 1007 1026 let (access_jwt, refresh_jwt) = if is_migration { 1008 - let access_meta = 1009 - match crate::auth::create_access_token_with_metadata(&did, &secret_key_bytes) { 1010 - Ok(m) => m, 1011 - Err(e) => { 1012 - error!("Error creating access token for migration: {:?}", e); 1013 - return ( 1014 - StatusCode::INTERNAL_SERVER_ERROR, 1015 - Json(json!({"error": "InternalError"})), 1016 - ) 1017 - .into_response(); 1018 - } 1019 - }; 1020 - let refresh_meta = 1021 - match crate::auth::create_refresh_token_with_metadata(&did, &secret_key_bytes) { 1022 - Ok(m) => m, 1023 - Err(e) => { 1024 - error!("Error creating refresh token for migration: {:?}", e); 1025 - return ( 1026 - StatusCode::INTERNAL_SERVER_ERROR, 1027 - Json(json!({"error": "InternalError"})), 1028 - ) 1029 - .into_response(); 1030 - } 1031 - }; 1027 + info!( 1028 + "[MIGRATION] createAccount: Creating session tokens for migration did={}", 1029 + did 1030 + ); 1031 + let access_meta = match crate::auth::create_access_token_with_metadata( 1032 + &did, 1033 + &secret_key_bytes, 1034 + ) { 1035 + Ok(m) => m, 1036 + Err(e) => { 1037 + error!( 1038 + "[MIGRATION] createAccount: Error creating access token for migration: {:?}", 1039 + e 1040 + ); 1041 + return ( 1042 + StatusCode::INTERNAL_SERVER_ERROR, 1043 + Json(json!({"error": "InternalError"})), 1044 + ) 1045 + .into_response(); 1046 + } 1047 + }; 1048 + let refresh_meta = match crate::auth::create_refresh_token_with_metadata( 1049 + &did, 1050 + &secret_key_bytes, 1051 + ) { 1052 + Ok(m) => m, 1053 + Err(e) => { 1054 + error!( 1055 + "[MIGRATION] createAccount: Error creating refresh token for migration: {:?}", 1056 + e 1057 + ); 1058 + return ( 1059 + StatusCode::INTERNAL_SERVER_ERROR, 1060 + Json(json!({"error": "InternalError"})), 1061 + ) 1062 + .into_response(); 1063 + } 1064 + }; 1032 1065 if let Err(e) = sqlx::query!( 1033 1066 "INSERT INTO session_tokens (did, access_jti, refresh_jti, access_expires_at, refresh_expires_at) VALUES ($1, $2, $3, $4, $5)", 1034 1067 did, ··· 1040 1073 .execute(&state.db) 1041 1074 .await 1042 1075 { 1043 - error!("Error creating session for migration: {:?}", e); 1076 + error!("[MIGRATION] 
createAccount: Error creating session for migration: {:?}", e); 1044 1077 return ( 1045 1078 StatusCode::INTERNAL_SERVER_ERROR, 1046 1079 Json(json!({"error": "InternalError"})), 1047 1080 ) 1048 1081 .into_response(); 1049 1082 } 1083 + info!( 1084 + "[MIGRATION] createAccount: Session created successfully for did={}", 1085 + did 1086 + ); 1050 1087 (Some(access_meta.token), Some(refresh_meta.token)) 1051 1088 } else { 1052 1089 (None, None) 1053 1090 }; 1091 + 1092 + if is_migration { 1093 + info!( 1094 + "[MIGRATION] createAccount: SUCCESS - Account ready for migration did={} handle={}", 1095 + did, handle 1096 + ); 1097 + } 1054 1098 1055 1099 ( 1056 1100 StatusCode::OK,
+50 -8
src/api/identity/plc/submit.rs
··· 23 23 headers: axum::http::HeaderMap, 24 24 Json(input): Json<SubmitPlcOperationInput>, 25 25 ) -> Response { 26 + info!("[MIGRATION] submitPlcOperation called"); 26 27 let bearer = match crate::auth::extract_bearer_token_from_header( 27 28 headers.get("Authorization").and_then(|h| h.to_str().ok()), 28 29 ) { 29 30 Some(t) => t, 30 - None => return ApiError::AuthenticationRequired.into_response(), 31 + None => { 32 + info!("[MIGRATION] submitPlcOperation: No bearer token"); 33 + return ApiError::AuthenticationRequired.into_response(); 34 + } 31 35 }; 32 36 let auth_user = 33 37 match crate::auth::validate_bearer_token_allow_deactivated(&state.db, &bearer).await { 34 38 Ok(user) => user, 35 - Err(e) => return ApiError::from(e).into_response(), 39 + Err(e) => { 40 + info!("[MIGRATION] submitPlcOperation: Auth failed: {:?}", e); 41 + return ApiError::from(e).into_response(); 42 + } 36 43 }; 44 + info!( 45 + "[MIGRATION] submitPlcOperation: Authenticated user did={}", 46 + auth_user.did 47 + ); 37 48 if let Err(e) = crate::auth::scope_check::check_identity_scope( 38 49 auth_user.is_oauth, 39 50 auth_user.scope.as_deref(), 40 51 crate::oauth::scopes::IdentityAttr::Wildcard, 41 52 ) { 53 + info!("[MIGRATION] submitPlcOperation: Scope check failed"); 42 54 return e; 43 55 } 44 56 let did = &auth_user.did; ··· 188 200 let plc_client = PlcClient::new(None); 189 201 let operation_clone = input.operation.clone(); 190 202 let did_clone = did.clone(); 203 + info!( 204 + "[MIGRATION] submitPlcOperation: Sending operation to PLC directory for did={}", 205 + did 206 + ); 207 + let plc_start = std::time::Instant::now(); 191 208 let result: Result<(), CircuitBreakerError<PlcError>> = 192 209 with_circuit_breaker(&state.circuit_breakers.plc_directory, || async { 193 210 plc_client ··· 196 213 }) 197 214 .await; 198 215 match result { 199 - Ok(()) => {} 216 + Ok(()) => { 217 + info!( 218 + "[MIGRATION] submitPlcOperation: PLC directory accepted operation in {:?}", 219 + plc_start.elapsed() 220 + ); 221 + } 200 222 Err(CircuitBreakerError::CircuitOpen(e)) => { 201 - warn!("PLC directory circuit breaker open: {}", e); 223 + warn!( 224 + "[MIGRATION] submitPlcOperation: PLC directory circuit breaker open: {}", 225 + e 226 + ); 202 227 return ( 203 228 StatusCode::SERVICE_UNAVAILABLE, 204 229 Json(json!({ ··· 209 234 .into_response(); 210 235 } 211 236 Err(CircuitBreakerError::OperationFailed(e)) => { 212 - error!("Failed to submit PLC operation: {:?}", e); 237 + error!( 238 + "[MIGRATION] submitPlcOperation: PLC operation failed: {:?}", 239 + e 240 + ); 213 241 return ( 214 242 StatusCode::BAD_GATEWAY, 215 243 Json(json!({ ··· 220 248 .into_response(); 221 249 } 222 250 } 251 + info!( 252 + "[MIGRATION] submitPlcOperation: Sequencing identity event for did={}", 253 + did 254 + ); 223 255 match sqlx::query!( 224 256 "INSERT INTO repo_seq (did, event_type) VALUES ($1, 'identity') RETURNING seq", 225 257 did ··· 228 260 .await 229 261 { 230 262 Ok(row) => { 263 + info!( 264 + "[MIGRATION] submitPlcOperation: Identity event sequenced with seq={}", 265 + row.seq 266 + ); 231 267 if let Err(e) = sqlx::query(&format!("NOTIFY repo_updates, '{}'", row.seq)) 232 268 .execute(&state.db) 233 269 .await 234 270 { 235 - warn!("Failed to notify identity event: {:?}", e); 271 + warn!( 272 + "[MIGRATION] submitPlcOperation: Failed to notify identity event: {:?}", 273 + e 274 + ); 236 275 } 237 276 } 238 277 Err(e) => { 239 - warn!("Failed to sequence identity event: {:?}", e); 278 + warn!( 279 + "[MIGRATION] 
submitPlcOperation: Failed to sequence identity event: {:?}", 280 + e 281 + ); 240 282 } 241 283 } 242 - info!("Submitted PLC operation for user {}", did); 284 + info!("[MIGRATION] submitPlcOperation: SUCCESS for did={}", did); 243 285 (StatusCode::OK, Json(json!({}))).into_response() 244 286 }
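Since the open question is whether this identity event ever reaches downstream consumers, one way to watch for it is to tail the PDS's own firehose while re-running submitPlcOperation. A rough sketch, assuming the ws npm package and a placeholder PDS hostname; it only byte-scans frames for the "#identity" type tag instead of properly decoding the CBOR:

```typescript
// Sketch: watch a PDS firehose and report when an #identity frame goes out.
import WebSocket from "ws";

const host = "pds.example.com"; // placeholder
const ws = new WebSocket(`wss://${host}/xrpc/com.atproto.sync.subscribeRepos`);

ws.on("message", (data: Buffer) => {
  // The frame header encodes the event type (e.g. "#commit", "#account",
  // "#identity") as a CBOR text string, so a byte search is enough for a
  // quick yes/no check during the investigation.
  if (data.includes("#identity")) {
    console.log(`[${new Date().toISOString()}] saw #identity frame (${data.length} bytes)`);
  }
});

ws.on("error", (err) => console.error("subscribeRepos error:", err));
```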
+22 -36
src/api/repo/blob.rs
··· 1 1 use crate::auth::{ServiceTokenVerifier, is_service_token}; 2 2 use crate::delegation::{self, DelegationActionType}; 3 3 use crate::state::AppState; 4 + use crate::sync::import::find_blob_refs_ipld; 4 5 use axum::body::Bytes; 5 6 use axum::{ 6 7 Json, ··· 9 10 response::{IntoResponse, Response}, 10 11 }; 11 12 use cid::Cid; 13 + use ipld_core::ipld::Ipld; 12 14 use jacquard_repo::storage::BlockStore; 13 15 use multihash::Multihash; 14 16 use serde::{Deserialize, Serialize}; 15 17 use serde_json::json; 16 18 use sha2::{Digest, Sha256}; 17 19 use std::str::FromStr; 18 - use tracing::{debug, error}; 20 + use tracing::{debug, error, warn}; 19 21 20 22 const MAX_BLOB_SIZE: usize = 10_000_000_000; 21 23 const MAX_VIDEO_BLOB_SIZE: usize = 10_000_000_000; ··· 258 260 pub blobs: Vec<RecordBlob>, 259 261 } 260 262 261 - fn find_blobs(val: &serde_json::Value, blobs: &mut Vec<String>) { 262 - if let Some(obj) = val.as_object() { 263 - if let Some(type_val) = obj.get("$type") 264 - && type_val == "blob" 265 - && let Some(r) = obj.get("ref") 266 - && let Some(link) = r.get("$link") 267 - && let Some(s) = link.as_str() 268 - { 269 - blobs.push(s.to_string()); 270 - } 271 - for (_, v) in obj { 272 - find_blobs(v, blobs); 273 - } 274 - } else if let Some(arr) = val.as_array() { 275 - for v in arr { 276 - find_blobs(v, blobs); 277 - } 278 - } 279 - } 280 - 281 263 pub async fn list_missing_blobs( 282 264 State(state): State<AppState>, 283 265 headers: axum::http::HeaderMap, ··· 295 277 .into_response(); 296 278 } 297 279 }; 298 - let auth_user = match crate::auth::validate_bearer_token_allow_deactivated(&state.db, &token).await { 299 - Ok(user) => user, 300 - Err(_) => { 301 - return ( 302 - StatusCode::UNAUTHORIZED, 303 - Json(json!({"error": "AuthenticationFailed"})), 304 - ) 305 - .into_response(); 306 - } 307 - }; 280 + let auth_user = 281 + match crate::auth::validate_bearer_token_allow_deactivated(&state.db, &token).await { 282 + Ok(user) => user, 283 + Err(_) => { 284 + return ( 285 + StatusCode::UNAUTHORIZED, 286 + Json(json!({"error": "AuthenticationFailed"})), 287 + ) 288 + .into_response(); 289 + } 290 + }; 308 291 let did = auth_user.did; 309 292 let user_query = sqlx::query!("SELECT id FROM users WHERE did = $1", did) 310 293 .fetch_optional(&state.db) ··· 362 345 Ok(Some(b)) => b, 363 346 _ => continue, 364 347 }; 365 - let record_val: serde_json::Value = match serde_ipld_dagcbor::from_slice(&block_bytes) { 348 + let record_ipld: Ipld = match serde_ipld_dagcbor::from_slice(&block_bytes) { 366 349 Ok(v) => v, 367 - Err(_) => continue, 350 + Err(e) => { 351 + warn!("Failed to parse record {} as IPLD: {:?}", record_cid_str, e); 352 + continue; 353 + } 368 354 }; 369 - let mut blobs = Vec::new(); 370 - find_blobs(&record_val, &mut blobs); 371 - for blob_cid_str in blobs { 355 + let blob_refs = find_blob_refs_ipld(&record_ipld, 0); 356 + for blob_ref in blob_refs { 357 + let blob_cid_str = blob_ref.cid; 372 358 let exists = sqlx::query!( 373 359 "SELECT 1 as one FROM blobs WHERE cid = $1 AND created_by_user = $2", 374 360 blob_cid_str,
+15 -15
src/api/repo/import.rs
··· 350 350 .into_response(); 351 351 } 352 352 }; 353 - let key_bytes = match crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version) { 354 - Ok(k) => k, 355 - Err(e) => { 356 - error!("Failed to decrypt signing key: {}", e); 357 - return ( 358 - StatusCode::INTERNAL_SERVER_ERROR, 359 - Json(json!({"error": "InternalError"})), 360 - ) 361 - .into_response(); 362 - } 363 - }; 353 + let key_bytes = 354 + match crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version) { 355 + Ok(k) => k, 356 + Err(e) => { 357 + error!("Failed to decrypt signing key: {}", e); 358 + return ( 359 + StatusCode::INTERNAL_SERVER_ERROR, 360 + Json(json!({"error": "InternalError"})), 361 + ) 362 + .into_response(); 363 + } 364 + }; 364 365 let signing_key = match SigningKey::from_slice(&key_bytes) { 365 366 Ok(k) => k, 366 367 Err(e) => { ··· 422 423 "Created new commit for imported repo: cid={}, rev={}", 423 424 new_root_str, new_rev_str 424 425 ); 425 - if !is_migration { 426 - if let Err(e) = sequence_import_event(&state, did, &new_root_str).await { 427 - warn!("Failed to sequence import event: {:?}", e); 428 - } 426 + if !is_migration && let Err(e) = sequence_import_event(&state, did, &new_root_str).await 427 + { 428 + warn!("Failed to sequence import event: {:?}", e); 429 429 } 430 430 (StatusCode::OK, Json(json!({}))).into_response() 431 431 }
+3 -5
src/api/repo/record/read.rs
··· 11 11 use ipld_core::ipld::Ipld; 12 12 use jacquard_repo::storage::BlockStore; 13 13 use serde::{Deserialize, Serialize}; 14 - use serde_json::{json, Map, Value}; 14 + use serde_json::{Map, Value, json}; 15 15 use std::collections::HashMap; 16 16 use std::str::FromStr; 17 17 use tracing::{error, info}; ··· 37 37 } 38 38 Ipld::List(arr) => Value::Array(arr.into_iter().map(ipld_to_json).collect()), 39 39 Ipld::Map(map) => { 40 - let obj: Map<String, Value> = map 41 - .into_iter() 42 - .map(|(k, v)| (k, ipld_to_json(v))) 43 - .collect(); 40 + let obj: Map<String, Value> = 41 + map.into_iter().map(|(k, v)| (k, ipld_to_json(v))).collect(); 44 42 Value::Object(obj) 45 43 } 46 44 Ipld::Link(cid) => json!({ "$link": cid.to_string() }),
+6 -7
src/api/repo/record/utils.rs
··· 5 5 use jacquard_repo::commit::Commit; 6 6 use jacquard_repo::storage::BlockStore; 7 7 use k256::ecdsa::SigningKey; 8 - use serde_json::{json, Value}; 8 + use serde_json::{Value, json}; 9 9 use std::str::FromStr; 10 10 use uuid::Uuid; 11 11 ··· 18 18 fn extract_blob_cids_recursive(value: &Value, blobs: &mut Vec<String>) { 19 19 match value { 20 20 Value::Object(map) => { 21 - if map.get("$type").and_then(|v| v.as_str()) == Some("blob") { 22 - if let Some(ref_obj) = map.get("ref") { 23 - if let Some(link) = ref_obj.get("$link").and_then(|v| v.as_str()) { 24 - blobs.push(link.to_string()); 25 - } 26 - } 21 + if map.get("$type").and_then(|v| v.as_str()) == Some("blob") 22 + && let Some(ref_obj) = map.get("ref") 23 + && let Some(link) = ref_obj.get("$link").and_then(|v| v.as_str()) 24 + { 25 + blobs.push(link.to_string()); 27 26 } 28 27 for v in map.values() { 29 28 extract_blob_cids_recursive(v, blobs);
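For reference, the blob-ref shape this recursion looks for is the same one the frontend and listMissingBlobs deal with. A TypeScript sketch of the equivalent walk over the JSON form of a record; names here are illustrative, not from the repo:

```typescript
// Sketch of the same blob-ref walk, assuming the JSON form of a record where
// blob references look like:
//   { "$type": "blob", "ref": { "$link": "<cid>" }, "mimeType": "...", "size": 123 }
function extractBlobCids(value: unknown, blobs: string[] = []): string[] {
  if (Array.isArray(value)) {
    for (const v of value) extractBlobCids(v, blobs);
  } else if (value && typeof value === "object") {
    const obj = value as Record<string, unknown>;
    const ref = obj["ref"] as Record<string, unknown> | undefined;
    const link = ref?.["$link"];
    if (obj["$type"] === "blob" && typeof link === "string") {
      blobs.push(link);
    }
    // Keep recursing so nested blobs (e.g. inside embeds) are also found.
    for (const v of Object.values(obj)) extractBlobCids(v, blobs);
  }
  return blobs;
}
```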
+94 -21
src/api/server/account_status.rs
··· 8 8 response::{IntoResponse, Response}, 9 9 }; 10 10 use bcrypt::verify; 11 - use cid::Cid; 12 11 use chrono::{Duration, Utc}; 12 + use cid::Cid; 13 13 use jacquard_repo::commit::Commit; 14 14 use jacquard_repo::storage::BlockStore; 15 15 use k256::ecdsa::SigningKey; ··· 216 216 })?; 217 217 218 218 if let Some(row) = user_row { 219 - let key_bytes = 220 - crate::config::decrypt_key(&row.key_bytes, row.encryption_version).map_err(|e| { 219 + let key_bytes = crate::config::decrypt_key(&row.key_bytes, row.encryption_version) 220 + .map_err(|e| { 221 221 error!("Failed to decrypt user key: {}", e); 222 222 ( 223 223 StatusCode::INTERNAL_SERVER_ERROR, ··· 247 247 )); 248 248 } 249 249 } 250 - } else if did.starts_with("did:web:") { 250 + } else if let Some(host_and_path) = did.strip_prefix("did:web:") { 251 251 let client = reqwest::Client::new(); 252 - let host_and_path = &did[8..]; 253 252 let decoded = host_and_path.replace("%3A", ":"); 254 253 let parts: Vec<&str> = decoded.split(':').collect(); 255 - let (host, path_parts) = if parts.len() > 1 && parts[1].chars().all(|c| c.is_ascii_digit()) { 254 + let (host, path_parts) = if parts.len() > 1 && parts[1].chars().all(|c| c.is_ascii_digit()) 255 + { 256 256 (format!("{}:{}", parts[0], parts[1]), parts[2..].to_vec()) 257 257 } else { 258 258 (parts[0].to_string(), parts[1..].to_vec()) 259 259 }; 260 - let scheme = if host.starts_with("localhost") || host.starts_with("127.") || host.contains(':') { 261 - "http" 262 - } else { 263 - "https" 264 - }; 260 + let scheme = 261 + if host.starts_with("localhost") || host.starts_with("127.") || host.contains(':') { 262 + "http" 263 + } else { 264 + "https" 265 + }; 265 266 let url = if path_parts.is_empty() { 266 267 format!("{}://{}/.well-known/did.json", scheme, host) 267 268 } else { ··· 323 324 State(state): State<AppState>, 324 325 headers: axum::http::HeaderMap, 325 326 ) -> Response { 327 + info!("[MIGRATION] activateAccount called"); 326 328 let extracted = match crate::auth::extract_auth_token_from_header( 327 329 headers.get("Authorization").and_then(|h| h.to_str().ok()), 328 330 ) { 329 331 Some(t) => t, 330 - None => return ApiError::AuthenticationRequired.into_response(), 332 + None => { 333 + info!("[MIGRATION] activateAccount: No auth token"); 334 + return ApiError::AuthenticationRequired.into_response(); 335 + } 331 336 }; 332 337 let dpop_proof = headers.get("DPoP").and_then(|h| h.to_str().ok()); 333 338 let http_uri = format!( ··· 346 351 .await 347 352 { 348 353 Ok(user) => user, 349 - Err(e) => return ApiError::from(e).into_response(), 354 + Err(e) => { 355 + info!("[MIGRATION] activateAccount: Auth failed: {:?}", e); 356 + return ApiError::from(e).into_response(); 357 + } 350 358 }; 359 + info!( 360 + "[MIGRATION] activateAccount: Authenticated user did={}", 361 + auth_user.did 362 + ); 351 363 352 364 if let Err(e) = crate::auth::scope_check::check_account_scope( 353 365 auth_user.is_oauth, ··· 355 367 crate::oauth::scopes::AccountAttr::Repo, 356 368 crate::oauth::scopes::AccountAction::Manage, 357 369 ) { 370 + info!("[MIGRATION] activateAccount: Scope check failed"); 358 371 return e; 359 372 } 360 373 361 374 let did = auth_user.did; 362 375 376 + info!( 377 + "[MIGRATION] activateAccount: Validating DID document for did={}", 378 + did 379 + ); 380 + let did_validation_start = std::time::Instant::now(); 363 381 if let Err((status, json)) = assert_valid_did_document_for_service(&state.db, &did).await { 364 382 info!( 365 - "activateAccount rejected for {}: DID document 
validation failed", 366 - did 383 + "[MIGRATION] activateAccount: DID document validation FAILED for {} (took {:?})", 384 + did, 385 + did_validation_start.elapsed() 367 386 ); 368 387 return (status, json).into_response(); 369 388 } 389 + info!( 390 + "[MIGRATION] activateAccount: DID document validation SUCCESS for {} (took {:?})", 391 + did, 392 + did_validation_start.elapsed() 393 + ); 370 394 371 395 let handle = sqlx::query_scalar!("SELECT handle FROM users WHERE did = $1", did) 372 396 .fetch_optional(&state.db) 373 397 .await 374 398 .ok() 375 399 .flatten(); 400 + info!( 401 + "[MIGRATION] activateAccount: Activating account did={} handle={:?}", 402 + did, handle 403 + ); 376 404 let result = sqlx::query!("UPDATE users SET deactivated_at = NULL WHERE did = $1", did) 377 405 .execute(&state.db) 378 406 .await; 379 407 match result { 380 408 Ok(_) => { 409 + info!( 410 + "[MIGRATION] activateAccount: DB update success for did={}", 411 + did 412 + ); 381 413 if let Some(ref h) = handle { 382 414 let _ = state.cache.delete(&format!("handle:{}", h)).await; 383 415 } 416 + info!( 417 + "[MIGRATION] activateAccount: Sequencing account event (active=true) for did={}", 418 + did 419 + ); 384 420 if let Err(e) = 385 421 crate::api::repo::record::sequence_account_event(&state, &did, true, None).await 386 422 { 387 - warn!("Failed to sequence account activation event: {}", e); 423 + warn!( 424 + "[MIGRATION] activateAccount: Failed to sequence account activation event: {}", 425 + e 426 + ); 427 + } else { 428 + info!("[MIGRATION] activateAccount: Account event sequenced successfully"); 388 429 } 430 + info!( 431 + "[MIGRATION] activateAccount: Sequencing identity event for did={} handle={:?}", 432 + did, handle 433 + ); 389 434 if let Err(e) = 390 435 crate::api::repo::record::sequence_identity_event(&state, &did, handle.as_deref()) 391 436 .await 392 437 { 393 - warn!("Failed to sequence identity event for activation: {}", e); 438 + warn!( 439 + "[MIGRATION] activateAccount: Failed to sequence identity event for activation: {}", 440 + e 441 + ); 442 + } else { 443 + info!("[MIGRATION] activateAccount: Identity event sequenced successfully"); 394 444 } 395 445 let repo_root = sqlx::query_scalar!( 396 446 "SELECT r.repo_root_cid FROM repos r JOIN users u ON r.user_id = u.id WHERE u.did = $1", ··· 401 451 .ok() 402 452 .flatten(); 403 453 if let Some(root_cid) = repo_root { 454 + info!( 455 + "[MIGRATION] activateAccount: Sequencing sync event for did={} root_cid={}", 456 + did, root_cid 457 + ); 404 458 let rev = if let Ok(cid) = Cid::from_str(&root_cid) { 405 459 if let Ok(Some(block)) = state.block_store.get(&cid).await { 406 460 Commit::from_cbor(&block).ok().map(|c| c.rev().to_string()) ··· 410 464 } else { 411 465 None 412 466 }; 413 - if let Err(e) = 414 - crate::api::repo::record::sequence_sync_event(&state, &did, &root_cid, rev.as_deref()).await 467 + if let Err(e) = crate::api::repo::record::sequence_sync_event( 468 + &state, 469 + &did, 470 + &root_cid, 471 + rev.as_deref(), 472 + ) 473 + .await 415 474 { 416 - warn!("Failed to sequence sync event for activation: {}", e); 475 + warn!( 476 + "[MIGRATION] activateAccount: Failed to sequence sync event for activation: {}", 477 + e 478 + ); 479 + } else { 480 + info!("[MIGRATION] activateAccount: Sync event sequenced successfully"); 417 481 } 482 + } else { 483 + warn!( 484 + "[MIGRATION] activateAccount: No repo root found for did={}", 485 + did 486 + ); 418 487 } 488 + info!("[MIGRATION] activateAccount: SUCCESS for did={}", did); 419 
489 (StatusCode::OK, Json(json!({}))).into_response() 420 490 } 421 491 Err(e) => { 422 - error!("DB error activating account: {:?}", e); 492 + error!( 493 + "[MIGRATION] activateAccount: DB error activating account: {:?}", 494 + e 495 + ); 423 496 ( 424 497 StatusCode::INTERNAL_SERVER_ERROR, 425 498 Json(json!({"error": "InternalError"})),
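The did:web handling above packs a few rules into the host/path parsing (percent-encoded port, localhost/port forcing http). A TypeScript sketch of the same resolution, mirroring the parsing shown in this hunk; the non-empty path branch is truncated in the diff, so that part follows the did:web convention (path segments joined, then /did.json) rather than this repo's code:

```typescript
// Sketch of did:web -> did.json URL resolution, mirroring the hunk above.
function didWebToUrl(did: string): string {
  const hostAndPath = did.slice("did:web:".length);
  const decoded = hostAndPath.replace(/%3A/g, ":");
  const parts = decoded.split(":");

  // A second segment of pure digits is a port, not a path segment.
  const hasPort = parts.length > 1 && /^\d+$/.test(parts[1]);
  const host = hasPort ? `${parts[0]}:${parts[1]}` : parts[0];
  const pathParts = hasPort ? parts.slice(2) : parts.slice(1);

  const scheme =
    host.startsWith("localhost") || host.startsWith("127.") || host.includes(":")
      ? "http"
      : "https";

  return pathParts.length === 0
    ? `${scheme}://${host}/.well-known/did.json`
    : `${scheme}://${host}/${pathParts.join("/")}/did.json`;
}

// e.g. didWebToUrl("did:web:example.com") -> "https://example.com/.well-known/did.json"
```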
+3 -3
src/api/server/mod.rs
··· 26 26 pub use invite::{create_invite_code, create_invite_codes, get_account_invite_codes}; 27 27 pub use logo::get_logo; 28 28 pub use meta::{describe_server, health, robots_txt}; 29 + pub use migration::{ 30 + clear_migration_forwarding, get_migration_status, update_migration_forwarding, 31 + }; 29 32 pub use passkey_account::{ 30 33 complete_passkey_setup, create_passkey_account, recover_passkey_account, 31 34 request_passkey_recovery, start_passkey_registration_for_setup, ··· 56 59 pub use trusted_devices::{ 57 60 extend_device_trust, is_device_trusted, list_trusted_devices, revoke_trusted_device, 58 61 trust_device, update_trusted_device, 59 - }; 60 - pub use migration::{ 61 - clear_migration_forwarding, get_migration_status, update_migration_forwarding, 62 62 }; 63 63 pub use verify_email::{resend_migration_verification, verify_migration_email}; 64 64 pub use verify_token::{VerifyTokenInput, VerifyTokenOutput, verify_token, verify_token_internal};
+1 -5
src/api/server/verify_token.rs
··· 1 - use axum::{ 2 - Json, 3 - extract::State, 4 - http::StatusCode, 5 - }; 1 + use axum::{Json, extract::State, http::StatusCode}; 6 2 use serde::{Deserialize, Serialize}; 7 3 use serde_json::json; 8 4 use tracing::{error, info, warn};
+13 -16
src/auth/mod.rs
··· 229 229 .ok() 230 230 .flatten(); 231 231 232 - match session_row { 233 - Some(row) => { 234 - if row.access_expires_at > chrono::Utc::now() { 235 - session_valid = true; 236 - if let Some(c) = cache { 237 - let _ = c 238 - .set( 239 - &session_cache_key, 240 - "1", 241 - Duration::from_secs(SESSION_CACHE_TTL_SECS), 242 - ) 243 - .await; 244 - } 245 - } else { 246 - return Err(TokenValidationError::TokenExpired); 232 + if let Some(row) = session_row { 233 + if row.access_expires_at > chrono::Utc::now() { 234 + session_valid = true; 235 + if let Some(c) = cache { 236 + let _ = c 237 + .set( 238 + &session_cache_key, 239 + "1", 240 + Duration::from_secs(SESSION_CACHE_TTL_SECS), 241 + ) 242 + .await; 247 243 } 244 + } else { 245 + return Err(TokenValidationError::TokenExpired); 248 246 } 249 - None => {} 250 247 } 251 248 } 252 249
+2 -3
src/comms/sender.rs
··· 1 1 use async_trait::async_trait; 2 - use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; 2 + use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; 3 3 use reqwest::Client; 4 4 use serde_json::json; 5 5 use std::process::Stdio; ··· 103 103 } 104 104 105 105 pub fn format_email(&self, notification: &QueuedComms) -> String { 106 - let subject = 107 - mime_encode_header(notification.subject.as_deref().unwrap_or("Notification")); 106 + let subject = mime_encode_header(notification.subject.as_deref().unwrap_or("Notification")); 108 107 let recipient = sanitize_header_value(&notification.recipient); 109 108 let from_header = if self.from_name.is_empty() { 110 109 self.from_address.clone()
+9 -11
src/delegation/scopes.rs
··· 75 75 } 76 76 77 77 fn find_matching_scope<'a>(requested: &str, granted: &HashSet<&'a str>) -> Option<&'a str> { 78 - for granted_scope in granted { 79 - if scopes_compatible(granted_scope, requested) { 80 - return Some(granted_scope); 81 - } 82 - } 83 - None 78 + granted 79 + .iter() 80 + .find(|&granted_scope| scopes_compatible(granted_scope, requested)) 81 + .map(|v| v as _) 84 82 } 85 83 86 84 fn scopes_compatible(granted: &str, requested: &str) -> bool { ··· 97 95 return true; 98 96 } 99 97 100 - if granted_base.ends_with(".*") { 101 - let prefix = &granted_base[..granted_base.len() - 2]; 102 - if requested_base.starts_with(prefix) && requested_base.len() > prefix.len() { 103 - return true; 104 - } 98 + if let Some(prefix) = granted_base.strip_suffix(".*") 99 + && requested_base.starts_with(prefix) 100 + && requested_base.len() > prefix.len() 101 + { 102 + return true; 105 103 } 106 104 107 105 false
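The wildcard rule being refactored here is easy to misread, so a small illustration of what it accepts. This is a standalone sketch with hypothetical names, and it ignores whatever base-scope normalization happens before the comparison:

```typescript
// Sketch of the ".*" wildcard rule: a granted scope ending in ".*" covers any
// requested scope that strictly extends its prefix.
function scopeCovers(granted: string, requested: string): boolean {
  if (granted === requested) return true;
  if (granted.endsWith(".*")) {
    const prefix = granted.slice(0, -2); // drop the ".*"
    return requested.startsWith(prefix) && requested.length > prefix.length;
  }
  return false;
}

// scopeCovers("app.bsky.feed.*", "app.bsky.feed.post")    === true
// scopeCovers("app.bsky.feed.*", "app.bsky.graph.follow") === false
```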
+75 -66
src/moderation/mod.rs
··· 1 1 /* 2 - * CONTENT WARNING 3 - * 4 - * This file contains explicit slurs and hateful language. We're sorry you have to see them. 5 - * 6 - * These words exist here for one reason: to ensure our moderation system correctly blocks them. 7 - * We can't verify the filter catches the n-word without testing against the actual word. 8 - * Euphemisms wouldn't prove the protection works. 9 - * 10 - * If reading this file has caused you distress, please know: 11 - * - you are valued and welcome in this community 12 - * - these words do not reflect the views of this project or its contributors 13 - * - we maintain this code precisely because we believe everyone deserves an experience on the web that is free from this kinda language 2 + * CONTENT WARNING 3 + * 4 + * This file contains explicit slurs and hateful language. We're sorry you have to see them. 5 + * 6 + * These words exist here for one reason: to ensure our moderation system correctly blocks them. 7 + * We can't verify the filter catches the n-word without testing against the actual word. 8 + * Euphemisms wouldn't prove the protection works. 9 + * 10 + * If reading this file has caused you distress, please know: 11 + * - you are valued and welcome in this community 12 + * - these words do not reflect the views of this project or its contributors 13 + * - we maintain this code precisely because we believe everyone deserves an experience on the web that is free from this kinda language 14 14 */ 15 15 16 16 use regex::Regex; ··· 70 70 71 71 fn has_explicit_slur_with_extra_words(text: &str, extra_words: &[String]) -> bool { 72 72 let text_lower = text.to_lowercase(); 73 - let normalized = text_lower.replace('.', "").replace('-', "").replace('_', ""); 73 + let normalized = text_lower.replace(['.', '-', '_'], ""); 74 74 let stripped = strip_trailing_digits(&text_lower); 75 75 let normalized_stripped = strip_trailing_digits(&normalized); 76 76 ··· 104 104 #[cfg(test)] 105 105 mod tests { 106 106 use super::*; 107 + use base64::Engine; 108 + 109 + fn d(b64: &str) -> String { 110 + let bytes = base64::engine::general_purpose::STANDARD.decode(b64).unwrap(); 111 + String::from_utf8(bytes).unwrap() 112 + } 107 113 108 114 #[test] 109 115 fn test_chink_pattern() { 110 - assert!(has_explicit_slur("chink")); 111 - assert!(has_explicit_slur("chinks")); 112 - assert!(has_explicit_slur("CHINK")); 113 - assert!(has_explicit_slur("Chinks")); 116 + assert!(has_explicit_slur(&d("Y2hpbms="))); 117 + assert!(has_explicit_slur(&d("Y2hpbmtz"))); 118 + assert!(has_explicit_slur(&d("Q0hJTks="))); 119 + assert!(has_explicit_slur(&d("Q2hpbmtz"))); 114 120 } 115 121 116 122 #[test] 117 123 fn test_coon_pattern() { 118 - assert!(has_explicit_slur("coon")); 119 - assert!(has_explicit_slur("coons")); 120 - assert!(has_explicit_slur("COON")); 124 + assert!(has_explicit_slur(&d("Y29vbg=="))); 125 + assert!(has_explicit_slur(&d("Y29vbnM="))); 126 + assert!(has_explicit_slur(&d("Q09PTg=="))); 121 127 } 122 128 123 129 #[test] 124 130 fn test_fag_pattern() { 125 - assert!(has_explicit_slur("fag")); 126 - assert!(has_explicit_slur("fags")); 127 - assert!(has_explicit_slur("faggot")); 128 - assert!(has_explicit_slur("faggots")); 129 - assert!(has_explicit_slur("faggotry")); 131 + assert!(has_explicit_slur(&d("ZmFn"))); 132 + assert!(has_explicit_slur(&d("ZmFncw=="))); 133 + assert!(has_explicit_slur(&d("ZmFnZ290"))); 134 + assert!(has_explicit_slur(&d("ZmFnZ290cw=="))); 135 + assert!(has_explicit_slur(&d("ZmFnZ290cnk="))); 130 136 } 131 137 132 138 #[test] 133 139 fn 
test_kike_pattern() { 134 - assert!(has_explicit_slur("kike")); 135 - assert!(has_explicit_slur("kikes")); 136 - assert!(has_explicit_slur("KIKE")); 137 - assert!(has_explicit_slur("kikery")); 140 + assert!(has_explicit_slur(&d("a2lrZQ=="))); 141 + assert!(has_explicit_slur(&d("a2lrZXM="))); 142 + assert!(has_explicit_slur(&d("S0lLRQ=="))); 143 + assert!(has_explicit_slur(&d("a2lrZXJ5"))); 138 144 } 139 145 140 146 #[test] 141 147 fn test_nigger_pattern() { 142 - assert!(has_explicit_slur("nigger")); 143 - assert!(has_explicit_slur("niggers")); 144 - assert!(has_explicit_slur("NIGGER")); 145 - assert!(has_explicit_slur("nigga")); 146 - assert!(has_explicit_slur("niggas")); 148 + assert!(has_explicit_slur(&d("bmlnZ2Vy"))); 149 + assert!(has_explicit_slur(&d("bmlnZ2Vycw=="))); 150 + assert!(has_explicit_slur(&d("TklHR0VS"))); 151 + assert!(has_explicit_slur(&d("bmlnZ2E="))); 152 + assert!(has_explicit_slur(&d("bmlnZ2Fz"))); 147 153 } 148 154 149 155 #[test] 150 156 fn test_tranny_pattern() { 151 - assert!(has_explicit_slur("tranny")); 152 - assert!(has_explicit_slur("trannies")); 153 - assert!(has_explicit_slur("TRANNY")); 157 + assert!(has_explicit_slur(&d("dHJhbm55"))); 158 + assert!(has_explicit_slur(&d("dHJhbm5pZXM="))); 159 + assert!(has_explicit_slur(&d("VFJBTk5Z"))); 154 160 } 155 161 156 162 #[test] 157 163 fn test_normalization_bypass() { 158 - assert!(has_explicit_slur("n.i.g.g.e.r")); 159 - assert!(has_explicit_slur("n-i-g-g-e-r")); 160 - assert!(has_explicit_slur("n_i_g_g_e_r")); 161 - assert!(has_explicit_slur("f.a.g")); 162 - assert!(has_explicit_slur("f-a-g")); 163 - assert!(has_explicit_slur("c.h.i.n.k")); 164 - assert!(has_explicit_slur("k_i_k_e")); 164 + assert!(has_explicit_slur(&d("bi5pLmcuZy5lLnI="))); 165 + assert!(has_explicit_slur(&d("bi1pLWctZy1lLXI="))); 166 + assert!(has_explicit_slur(&d("bl9pX2dfZ19lX3I="))); 167 + assert!(has_explicit_slur(&d("Zi5hLmc="))); 168 + assert!(has_explicit_slur(&d("Zi1hLWc="))); 169 + assert!(has_explicit_slur(&d("Yy5oLmkubi5r"))); 170 + assert!(has_explicit_slur(&d("a19pX2tfZQ=="))); 165 171 } 166 172 167 173 #[test] 168 174 fn test_trailing_digits_bypass() { 169 - assert!(has_explicit_slur("faggot123")); 170 - assert!(has_explicit_slur("nigger69")); 171 - assert!(has_explicit_slur("chink420")); 172 - assert!(has_explicit_slur("fag1")); 173 - assert!(has_explicit_slur("kike2024")); 174 - assert!(has_explicit_slur("n_i_g_g_e_r123")); 175 + assert!(has_explicit_slur(&d("ZmFnZ290MTIz"))); 176 + assert!(has_explicit_slur(&d("bmlnZ2VyNjk="))); 177 + assert!(has_explicit_slur(&d("Y2hpbms0MjA="))); 178 + assert!(has_explicit_slur(&d("ZmFnMQ=="))); 179 + assert!(has_explicit_slur(&d("a2lrZTIwMjQ="))); 180 + assert!(has_explicit_slur(&d("bl9pX2dfZ19lX3IxMjM="))); 175 181 } 176 182 177 183 #[test] 178 184 fn test_embedded_in_sentence() { 179 - assert!(has_explicit_slur("you are a faggot")); 180 - assert!(has_explicit_slur("stupid nigger")); 181 - assert!(has_explicit_slur("go away chink")); 185 + assert!(has_explicit_slur(&d("eW91IGFyZSBhIGZhZ2dvdA=="))); 186 + assert!(has_explicit_slur(&d("c3R1cGlkIG5pZ2dlcg=="))); 187 + assert!(has_explicit_slur(&d("Z28gYXdheSBjaGluaw=="))); 182 188 } 183 189 184 190 #[test] ··· 210 216 211 217 #[test] 212 218 fn test_case_insensitive() { 213 - assert!(has_explicit_slur("NIGGER")); 214 - assert!(has_explicit_slur("Nigger")); 215 - assert!(has_explicit_slur("NiGgEr")); 216 - assert!(has_explicit_slur("FAGGOT")); 217 - assert!(has_explicit_slur("Faggot")); 219 + assert!(has_explicit_slur(&d("TklHR0VS"))); 220 + 
assert!(has_explicit_slur(&d("TmlnZ2Vy"))); 221 + assert!(has_explicit_slur(&d("TmlHZ0Vy"))); 222 + assert!(has_explicit_slur(&d("RkFHR09U"))); 223 + assert!(has_explicit_slur(&d("RmFnZ290"))); 218 224 } 219 225 220 226 #[test] 221 227 fn test_leetspeak_bypass() { 222 - assert!(has_explicit_slur("f4ggot")); 223 - assert!(has_explicit_slur("f4gg0t")); 224 - assert!(has_explicit_slur("n1gger")); 225 - assert!(has_explicit_slur("n1gg3r")); 226 - assert!(has_explicit_slur("k1ke")); 227 - assert!(has_explicit_slur("ch1nk")); 228 - assert!(has_explicit_slur("tr4nny")); 228 + assert!(has_explicit_slur(&d("ZjRnZ290"))); 229 + assert!(has_explicit_slur(&d("ZjRnZzB0"))); 230 + assert!(has_explicit_slur(&d("bjFnZ2Vy"))); 231 + assert!(has_explicit_slur(&d("bjFnZzNy"))); 232 + assert!(has_explicit_slur(&d("azFrZQ=="))); 233 + assert!(has_explicit_slur(&d("Y2gxbms="))); 234 + assert!(has_explicit_slur(&d("dHI0bm55"))); 229 235 } 230 236 231 237 #[test] ··· 253 259 assert!(has_explicit_slur_with_extra_words("b4dw0rd", &extra)); 254 260 assert!(has_explicit_slur_with_extra_words("b4dw0rd789", &extra)); 255 261 assert!(has_explicit_slur_with_extra_words("b.4.d.w.0.r.d", &extra)); 256 - assert!(has_explicit_slur_with_extra_words("this contains badword here", &extra)); 262 + assert!(has_explicit_slur_with_extra_words( 263 + "this contains badword here", 264 + &extra 265 + )); 257 266 assert!(has_explicit_slur_with_extra_words("0ff3n$1v3", &extra)); 258 267 259 268 assert!(!has_explicit_slur_with_extra_words("goodword", &extra));
+12 -9
src/oauth/endpoints/delegation.rs
··· 88 88 } 89 89 }; 90 90 91 - if let Err(_) = db::set_request_did(&state.db, &form.request_uri, &delegated_did).await { 91 + if db::set_request_did(&state.db, &form.request_uri, &delegated_did) 92 + .await 93 + .is_err() 94 + { 92 95 tracing::warn!("Failed to set delegated DID on authorization request"); 93 96 } 94 97 ··· 168 171 .into_response(); 169 172 } 170 173 171 - let password_valid = match &controller.password_hash { 172 - Some(hash) => match bcrypt::verify(&form.password, hash) { 173 - Ok(valid) => valid, 174 - Err(_) => false, 175 - }, 176 - None => false, 177 - }; 174 + let password_valid = controller 175 + .password_hash 176 + .as_ref() 177 + .map(|hash| bcrypt::verify(&form.password, hash).unwrap_or_default()) 178 + .unwrap_or_default(); 178 179 179 180 if !password_valid { 180 181 return Json(DelegationAuthResponse { ··· 186 187 .into_response(); 187 188 } 188 189 189 - if let Err(_) = db::set_controller_did(&state.db, &form.request_uri, &form.controller_did).await 190 + if db::set_controller_did(&state.db, &form.request_uri, &form.controller_did) 191 + .await 192 + .is_err() 190 193 { 191 194 return Json(DelegationAuthResponse { 192 195 success: false,
+27 -26
src/sync/import.rs
··· 189 189 if let Some(Ipld::List(entries)) = obj.get("e") { 190 190 for entry in entries { 191 191 if let Ipld::Map(entry_obj) = entry { 192 - let prefix_len = entry_obj.get("p").and_then(|p| { 193 - if let Ipld::Integer(n) = p { 194 - Some(*n as usize) 195 - } else { 196 - None 197 - } 198 - }).unwrap_or(0); 192 + let prefix_len = entry_obj 193 + .get("p") 194 + .and_then(|p| { 195 + if let Ipld::Integer(n) = p { 196 + Some(*n as usize) 197 + } else { 198 + None 199 + } 200 + }) 201 + .unwrap_or(0); 199 202 200 203 let key_suffix = entry_obj.get("k").and_then(|k| { 201 204 if let Ipld::Bytes(b) = k { ··· 222 225 } 223 226 }); 224 227 225 - if let Some(record_cid) = record_cid { 226 - if let Ok(full_key) = String::from_utf8(current_key.clone()) { 227 - if let Some(record_block) = blocks.get(&record_cid) 228 - && let Ok(record_value) = 229 - serde_ipld_dagcbor::from_slice::<Ipld>(record_block) 230 - { 231 - let blob_refs = find_blob_refs_ipld(&record_value, 0); 232 - let parts: Vec<&str> = full_key.split('/').collect(); 233 - if parts.len() >= 2 { 234 - let collection = parts[..parts.len() - 1].join("/"); 235 - let rkey = parts[parts.len() - 1].to_string(); 236 - records.push(ImportedRecord { 237 - collection, 238 - rkey, 239 - cid: record_cid, 240 - blob_refs, 241 - }); 242 - } 243 - } 228 + if let Some(record_cid) = record_cid 229 + && let Ok(full_key) = String::from_utf8(current_key.clone()) 230 + && let Some(record_block) = blocks.get(&record_cid) 231 + && let Ok(record_value) = 232 + serde_ipld_dagcbor::from_slice::<Ipld>(record_block) 233 + { 234 + let blob_refs = find_blob_refs_ipld(&record_value, 0); 235 + let parts: Vec<&str> = full_key.split('/').collect(); 236 + if parts.len() >= 2 { 237 + let collection = parts[..parts.len() - 1].join("/"); 238 + let rkey = parts[parts.len() - 1].to_string(); 239 + records.push(ImportedRecord { 240 + collection, 241 + rkey, 242 + cid: record_cid, 243 + blob_refs, 244 + }); 244 245 } 245 246 } 246 247 }
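For context on the fields the flattened let-chain reads: atproto MST nodes prefix-compress their keys, so each entry carries `p` (bytes shared with the previous key) and `k` (the remaining suffix), which is what `prefix_len` and `key_suffix` extract above. The step that rebuilds `current_key` from those two values isn't visible in this hunk; it presumably looks something like the sketch below (names are illustrative, not the exact code):

```rust
// Rebuild the full record key for one MST entry: keep the first `prefix_len`
// bytes of the previously decoded key, then append this entry's suffix.
fn rebuild_key(previous_key: &[u8], prefix_len: usize, key_suffix: &[u8]) -> Vec<u8> {
    let mut key = previous_key[..prefix_len].to_vec();
    key.extend_from_slice(key_suffix);
    key
}
```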
+13 -14
src/validation/mod.rs
··· 161 161 .get("$type") 162 162 .and_then(|v| v.as_str()) 163 163 .is_some_and(|t| t == "app.bsky.richtext.facet#tag"); 164 - if is_tag { 165 - if let Some(tag) = feature.get("tag").and_then(|v| v.as_str()) { 166 - if crate::moderation::has_explicit_slur(tag) { 167 - return Err(ValidationError::BannedContent { 168 - path: format!("facets/{}/features/{}/tag", i, j), 169 - }); 170 - } 171 - } 164 + if is_tag 165 + && let Some(tag) = feature.get("tag").and_then(|v| v.as_str()) 166 + && crate::moderation::has_explicit_slur(tag) 167 + { 168 + return Err(ValidationError::BannedContent { 169 + path: format!("facets/{}/features/{}/tag", i, j), 170 + }); 172 171 } 173 172 } 174 173 } ··· 332 331 if !obj.contains_key("createdAt") { 333 332 return Err(ValidationError::MissingField("createdAt".to_string())); 334 333 } 335 - if let Some(rkey) = rkey { 336 - if crate::moderation::has_explicit_slur(rkey) { 337 - return Err(ValidationError::BannedContent { 338 - path: "rkey".to_string(), 339 - }); 340 - } 334 + if let Some(rkey) = rkey 335 + && crate::moderation::has_explicit_slur(rkey) 336 + { 337 + return Err(ValidationError::BannedContent { 338 + path: "rkey".to_string(), 339 + }); 341 340 } 342 341 if let Some(display_name) = obj.get("displayName").and_then(|v| v.as_str()) { 343 342 if display_name.is_empty() || display_name.len() > 240 {
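The facet check above walks records shaped by the app.bsky richtext lexicon, where hashtags live under `features` entries typed `app.bsky.richtext.facet#tag`. A sketch of the kind of value it inspects, assuming `serde_json` (the text, offsets, and helper name are made up for illustration); if the `tag` value tripped `has_explicit_slur`, validation would fail with `BannedContent` at path `facets/0/features/0/tag`:

```rust
// Illustrative only: a post record with a single hashtag facet, the shape the
// validation loop above iterates over.
fn example_post_with_tag_facet() -> serde_json::Value {
    serde_json::json!({
        "$type": "app.bsky.feed.post",
        "text": "hello #placeholder",
        "createdAt": "2024-01-01T00:00:00Z",
        "facets": [{
            "index": { "byteStart": 6, "byteEnd": 18 },
            "features": [{
                "$type": "app.bsky.richtext.facet#tag",
                "tag": "placeholder"
            }]
        }]
    })
}
```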
+34 -22
tests/banned_words.rs
··· 1 1 /* 2 - * CONTENT WARNING 3 - * 4 - * This file contains explicit slurs and hateful language. We're sorry you have to see them. 5 - * 6 - * These words exist here for one reason: to ensure our moderation system correctly blocks them. 7 - * We can't verify the filter catches the n-word without testing against the actual word. 8 - * Euphemisms wouldn't prove the protection works. 9 - * 10 - * If reading this file has caused you distress, please know: 11 - * - you are valued and welcome in this community 12 - * - these words do not reflect the views of this project or its contributors 13 - * - we maintain this code precisely because we believe everyone deserves an experience on the web that is free from this kinda language 14 - */ 2 + * CONTENT WARNING 3 + * 4 + * This file contains explicit slurs and hateful language. We're sorry you have to see them. 5 + * 6 + * These words exist here for one reason: to ensure our moderation system correctly blocks them. 7 + * We can't verify the filter catches the n-word without testing against the actual word. 8 + * Euphemisms wouldn't prove the protection works. 9 + * 10 + * If reading this file has caused you distress, please know: 11 + * - you are valued and welcome in this community 12 + * - these words do not reflect the views of this project or its contributors 13 + * - we maintain this code precisely because we believe everyone deserves an experience on the web that is free from this kinda language 14 + */ 15 15 16 16 mod common; 17 17 mod helpers; ··· 20 20 use reqwest::StatusCode; 21 21 use serde_json::json; 22 22 23 + fn decode(b64: &str) -> String { 24 + use base64::Engine; 25 + let bytes = base64::engine::general_purpose::STANDARD 26 + .decode(b64) 27 + .expect("invalid base64 in test"); 28 + String::from_utf8(bytes).expect("invalid utf8 in test") 29 + } 30 + 23 31 #[tokio::test] 24 32 async fn test_handle_with_slur_rejected() { 25 33 let client = client(); 26 34 let timestamp = chrono::Utc::now().timestamp_millis(); 27 - let offensive_handle = format!("nigger{}", timestamp); 35 + let slur = decode("bmlnZ2Vy"); 36 + let offensive_handle = format!("{}{}", slur, timestamp); 28 37 29 38 let create_payload = json!({ 30 39 "handle": offensive_handle, ··· 45 54 assert_eq!(res.status(), StatusCode::BAD_REQUEST); 46 55 let body: serde_json::Value = res.json().await.unwrap(); 47 56 assert_eq!(body["error"], "InvalidHandle"); 48 - assert!(body["message"] 49 - .as_str() 50 - .unwrap_or("") 51 - .contains("Inappropriate language")); 57 + assert!( 58 + body["message"] 59 + .as_str() 60 + .unwrap_or("") 61 + .contains("Inappropriate language") 62 + ); 52 63 } 53 64 54 65 #[tokio::test] 55 66 async fn test_handle_with_normalized_slur_rejected() { 56 67 let client = client(); 57 68 let timestamp = chrono::Utc::now().timestamp_millis(); 58 - let offensive_handle = format!("n-i-g-g-e-r{}", timestamp); 69 + let slur = decode("bi1pLWctZy1lLXI="); 70 + let offensive_handle = format!("{}{}", slur, timestamp); 59 71 60 72 let create_payload = json!({ 61 73 "handle": offensive_handle, ··· 84 96 let (_, jwt) = setup_new_user("handleupdate").await; 85 97 86 98 let update_payload = json!({ 87 - "handle": "faggots" 99 + "handle": decode("ZmFnZ290cw==") 88 100 }); 89 101 90 102 let res = client ··· 114 126 "rkey": "self", 115 127 "record": { 116 128 "$type": "app.bsky.actor.profile", 117 - "displayName": "I am a kike" 129 + "displayName": decode("SSBhbSBhIGtpa2U=") 118 130 } 119 131 }); 120 132 ··· 146 158 "record": { 147 159 "$type": "app.bsky.actor.profile", 148 
160 "displayName": "Normal Name", 149 - "description": "I hate all chinks" 161 + "description": decode("SSBoYXRlIGFsbCBjaGlua3M=") 150 162 } 151 163 }); 152 164
+10 -24
tests/firehose_validation.rs
··· 364 364 let client = client(); 365 365 let (token, did) = create_account_and_login(&client).await; 366 366 367 - let url = format!( 368 - "ws://127.0.0.1:{}/xrpc/com.atproto.sync.subscribeRepos", 369 - app_port() 370 - ); 371 - let (mut ws_stream, _) = connect_async(&url).await.expect("Failed to connect"); 372 - 373 367 let profile_payload = json!({ 374 368 "repo": did, 375 369 "collection": "app.bsky.actor.profile", ··· 393 387 let first_profile: Value = res.json().await.unwrap(); 394 388 let first_cid = first_profile["cid"].as_str().unwrap(); 395 389 396 - let timeout = tokio::time::timeout(std::time::Duration::from_secs(5), async { 397 - loop { 398 - let msg = ws_stream.next().await.unwrap().unwrap(); 399 - let raw_bytes = match msg { 400 - tungstenite::Message::Binary(bin) => bin, 401 - _ => continue, 402 - }; 403 - if let Ok((_, f)) = parse_frame(&raw_bytes) { 404 - if f.repo == did { 405 - break; 406 - } 407 - } 408 - } 409 - }) 410 - .await; 411 - assert!(timeout.is_ok(), "Timed out waiting for first commit"); 390 + let url = format!( 391 + "ws://127.0.0.1:{}/xrpc/com.atproto.sync.subscribeRepos", 392 + app_port() 393 + ); 394 + let (mut ws_stream, _) = connect_async(&url).await.expect("Failed to connect"); 412 395 413 396 let update_payload = json!({ 414 397 "repo": did, ··· 432 415 assert_eq!(res.status(), StatusCode::OK); 433 416 434 417 let mut frame_opt: Option<CommitFrame> = None; 435 - let timeout = tokio::time::timeout(std::time::Duration::from_secs(5), async { 418 + let timeout = tokio::time::timeout(std::time::Duration::from_secs(15), async { 436 419 loop { 437 - let msg = ws_stream.next().await.unwrap().unwrap(); 420 + let msg = match ws_stream.next().await { 421 + Some(Ok(m)) => m, 422 + _ => continue, 423 + }; 438 424 let raw_bytes = match msg { 439 425 tungstenite::Message::Binary(bin) => bin, 440 426 _ => continue,
+9 -2
tests/oauth_security.rs
··· 1116 1116 1117 1117 let delegated_handle = format!("deleg-{}", ts); 1118 1118 let delegated_res = http_client 1119 - .post(format!("{}/xrpc/com.tranquil.delegation.createDelegatedAccount", url)) 1119 + .post(format!( 1120 + "{}/xrpc/com.tranquil.delegation.createDelegatedAccount", 1121 + url 1122 + )) 1120 1123 .bearer_auth(controller_jwt) 1121 1124 .json(&json!({ 1122 1125 "handle": delegated_handle, ··· 1174 1177 panic!("Delegation auth failed: {}", error_body); 1175 1178 } 1176 1179 let auth_body: Value = auth_res.json().await.unwrap(); 1177 - assert!(auth_body["success"].as_bool().unwrap_or(false), "Delegation auth should succeed: {:?}", auth_body); 1180 + assert!( 1181 + auth_body["success"].as_bool().unwrap_or(false), 1182 + "Delegation auth should succeed: {:?}", 1183 + auth_body 1184 + ); 1178 1185 1179 1186 let consent_res = http_client 1180 1187 .post(format!("{}/oauth/authorize/consent", url))