+9 KNOWN_ISSUES.md
···
+ # Known Issues
+
+ ## Account migration from bsky.social
+
+ Migrating your account from bsky.social to this PDS works, but Bluesky's appview may not recognize your new signing key. You can still post and your followers will see it, but some authenticated requests may fail with "jwt signature does not match jwt issuer".
+
+ We have verified what we can on our side (the PLC directory is updated, the signing keys match, and relays have the account), but something about how we emit events is not prompting Bluesky's appview to refresh its identity data. Still investigating.
+
+ No workaround yet.
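
A quick way to double-check the "PLC updated, signing keys match" part of that checklist is to compare what the public PLC directory currently publishes for the DID against the key and PDS endpoint you expect after migration. The sketch below is an illustrative debugging aid, not part of this change set; the DID, key, and URL values are placeholders, and it assumes the standard plc.directory resolver and the usual atproto DID document shape (`#atproto` verification method, `#atproto_pds` service).

```typescript
// check-plc-state.ts — illustrative sketch with placeholder values, not part of the diff below.
// Compares the DID document published by plc.directory with the signing key and
// PDS endpoint the new PDS is expected to advertise after migration.

interface DidDocument {
  id: string;
  verificationMethod?: Array<{ id: string; publicKeyMultibase?: string }>;
  service?: Array<{ id: string; type: string; serviceEndpoint: string }>;
}

async function checkPlcState(
  did: string,
  expectedKeyMultibase: string, // e.g. the "zQ3sh..." value the new PDS reports
  expectedPdsUrl: string,
): Promise<void> {
  const res = await fetch(`https://plc.directory/${did}`);
  if (!res.ok) {
    throw new Error(`PLC resolution failed: ${res.status} ${res.statusText}`);
  }
  const doc = (await res.json()) as DidDocument;

  // atproto publishes the repo signing key as the "#atproto" verification method.
  const signingKey = doc.verificationMethod?.find((m) => m.id.endsWith("#atproto"))
    ?.publicKeyMultibase;
  // The hosting PDS is published as the "#atproto_pds" service entry.
  const pdsEndpoint = doc.service?.find((s) => s.id.endsWith("#atproto_pds"))
    ?.serviceEndpoint;

  console.log(
    "PLC signing key :",
    signingKey,
    signingKey === expectedKeyMultibase ? "(matches)" : "(MISMATCH)",
  );
  console.log(
    "PLC PDS endpoint:",
    pdsEndpoint,
    pdsEndpoint === expectedPdsUrl ? "(matches)" : "(MISMATCH)",
  );
}

// Placeholder usage:
// await checkPlcState("did:plc:example", "zQ3sh...placeholder", "https://pds.example.com");
```

If both values match and the appview still reports "jwt signature does not match jwt issuer", the stale data is on the appview's side of the cache, which is consistent with the event-emission theory above.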
+90 -14 frontend/src/lib/migration/atproto-client.ts
···
11
Session,
12
} from "./types";
13
14
export class AtprotoClient {
15
private baseUrl: string;
16
private accessToken: string | null = null;
···
107
body.authFactorToken = authFactorToken;
108
}
109
110
-
const session = await this.xrpc<Session>("com.atproto.server.createSession", {
111
-
httpMethod: "POST",
112
-
body,
113
-
});
114
115
this.accessToken = session.accessJwt;
116
return session;
···
239
async listMissingBlobs(
240
cursor?: string,
241
limit = 100,
242
-
): Promise<{ blobs: Array<{ cid: string; recordUri: string }>; cursor?: string }> {
243
const params: Record<string, string> = { limit: String(limit) };
244
if (cursor) {
245
params.cursor = cursor;
···
267
}
268
269
async submitPlcOperation(operation: PlcOperation): Promise<void> {
270
await this.xrpc("com.atproto.identity.submitPlcOperation", {
271
httpMethod: "POST",
272
body: { operation },
273
});
274
}
275
276
async getRecommendedDidCredentials(): Promise<DidCredentials> {
···
278
}
279
280
async activateAccount(): Promise<void> {
281
await this.xrpc("com.atproto.server.activateAccount", {
282
httpMethod: "POST",
283
});
284
}
285
286
async deactivateAccount(): Promise<void> {
287
-
await this.xrpc("com.atproto.server.deactivateAccount", {
288
-
httpMethod: "POST",
289
-
});
290
}
291
292
async checkAccountStatus(): Promise<AccountStatus> {
···
330
identifier: string,
331
password: string,
332
): Promise<Session> {
333
-
const session = await this.xrpc<Session>("com.atproto.server.createSession", {
334
-
httpMethod: "POST",
335
-
body: { identifier, password, allowDeactivated: true },
336
-
});
337
this.accessToken = session.accessJwt;
338
return session;
339
}
···
341
async verifyToken(
342
token: string,
343
identifier: string,
344
-
): Promise<{ success: boolean; did: string; purpose: string; channel: string }> {
345
return this.xrpc("com.tranquil.account.verifyToken", {
346
httpMethod: "POST",
347
body: { token, identifier },
···
392
393
if (handle.endsWith(".bsky.social")) {
394
const res = await fetch(
395
-
`https://public.api.bsky.app/xrpc/com.atproto.identity.resolveHandle?handle=${encodeURIComponent(handle)}`,
396
);
397
if (!res.ok) {
398
throw new Error(`Failed to resolve handle: ${res.statusText}`);
···
11
Session,
12
} from "./types";
13
14
+
function apiLog(
15
+
method: string,
16
+
endpoint: string,
17
+
data?: Record<string, unknown>,
18
+
) {
19
+
const timestamp = new Date().toISOString();
20
+
const msg = `[API ${timestamp}] ${method} ${endpoint}`;
21
+
if (data) {
22
+
console.log(msg, JSON.stringify(data, null, 2));
23
+
} else {
24
+
console.log(msg);
25
+
}
26
+
}
27
+
28
export class AtprotoClient {
29
private baseUrl: string;
30
private accessToken: string | null = null;
···
121
body.authFactorToken = authFactorToken;
122
}
123
124
+
const session = await this.xrpc<Session>(
125
+
"com.atproto.server.createSession",
126
+
{
127
+
httpMethod: "POST",
128
+
body,
129
+
},
130
+
);
131
132
this.accessToken = session.accessJwt;
133
return session;
···
256
async listMissingBlobs(
257
cursor?: string,
258
limit = 100,
259
+
): Promise<
260
+
{ blobs: Array<{ cid: string; recordUri: string }>; cursor?: string }
261
+
> {
262
const params: Record<string, string> = { limit: String(limit) };
263
if (cursor) {
264
params.cursor = cursor;
···
286
}
287
288
async submitPlcOperation(operation: PlcOperation): Promise<void> {
289
+
apiLog(
290
+
"POST",
291
+
`${this.baseUrl}/xrpc/com.atproto.identity.submitPlcOperation`,
292
+
{
293
+
operationType: operation.type,
294
+
operationPrev: operation.prev,
295
+
},
296
+
);
297
+
const start = Date.now();
298
await this.xrpc("com.atproto.identity.submitPlcOperation", {
299
httpMethod: "POST",
300
body: { operation },
301
});
302
+
apiLog(
303
+
"POST",
304
+
`${this.baseUrl}/xrpc/com.atproto.identity.submitPlcOperation COMPLETE`,
305
+
{
306
+
durationMs: Date.now() - start,
307
+
},
308
+
);
309
}
310
311
async getRecommendedDidCredentials(): Promise<DidCredentials> {
···
313
}
314
315
async activateAccount(): Promise<void> {
316
+
apiLog("POST", `${this.baseUrl}/xrpc/com.atproto.server.activateAccount`);
317
+
const start = Date.now();
318
await this.xrpc("com.atproto.server.activateAccount", {
319
httpMethod: "POST",
320
});
321
+
apiLog(
322
+
"POST",
323
+
`${this.baseUrl}/xrpc/com.atproto.server.activateAccount COMPLETE`,
324
+
{
325
+
durationMs: Date.now() - start,
326
+
},
327
+
);
328
}
329
330
async deactivateAccount(): Promise<void> {
331
+
apiLog("POST", `${this.baseUrl}/xrpc/com.atproto.server.deactivateAccount`);
332
+
const start = Date.now();
333
+
try {
334
+
await this.xrpc("com.atproto.server.deactivateAccount", {
335
+
httpMethod: "POST",
336
+
});
337
+
apiLog(
338
+
"POST",
339
+
`${this.baseUrl}/xrpc/com.atproto.server.deactivateAccount COMPLETE`,
340
+
{
341
+
durationMs: Date.now() - start,
342
+
success: true,
343
+
},
344
+
);
345
+
} catch (e) {
346
+
const err = e as Error & { error?: string; status?: number };
347
+
apiLog(
348
+
"POST",
349
+
`${this.baseUrl}/xrpc/com.atproto.server.deactivateAccount FAILED`,
350
+
{
351
+
durationMs: Date.now() - start,
352
+
error: err.message,
353
+
errorCode: err.error,
354
+
status: err.status,
355
+
},
356
+
);
357
+
throw e;
358
+
}
359
}
360
361
async checkAccountStatus(): Promise<AccountStatus> {
···
399
identifier: string,
400
password: string,
401
): Promise<Session> {
402
+
const session = await this.xrpc<Session>(
403
+
"com.atproto.server.createSession",
404
+
{
405
+
httpMethod: "POST",
406
+
body: { identifier, password, allowDeactivated: true },
407
+
},
408
+
);
409
this.accessToken = session.accessJwt;
410
return session;
411
}
···
413
async verifyToken(
414
token: string,
415
identifier: string,
416
+
): Promise<
417
+
{ success: boolean; did: string; purpose: string; channel: string }
418
+
> {
419
return this.xrpc("com.tranquil.account.verifyToken", {
420
httpMethod: "POST",
421
body: { token, identifier },
···
466
467
if (handle.endsWith(".bsky.social")) {
468
const res = await fetch(
469
+
`https://public.api.bsky.app/xrpc/com.atproto.identity.resolveHandle?handle=${
470
+
encodeURIComponent(handle)
471
+
}`,
472
);
473
if (!res.ok) {
474
throw new Error(`Failed to resolve handle: ${res.statusText}`);
+224 -29 frontend/src/lib/migration/flow.svelte.ts
···
20
updateStep,
21
} from "./storage";
22
23
function createInitialProgress(): MigrationProgress {
24
return {
25
repoExported: false,
···
105
password: string,
106
twoFactorCode?: string,
107
): Promise<void> {
108
if (!state.sourcePdsUrl) {
109
await resolveSourcePds(handle);
110
}
···
114
}
115
116
try {
117
const session = await sourceClient.login(handle, password, twoFactorCode);
118
state.sourceAccessToken = session.accessJwt;
119
state.sourceRefreshToken = session.refreshJwt;
120
state.sourceDid = session.did;
···
123
saveMigrationState(state);
124
} catch (e) {
125
const err = e as Error & { error?: string };
126
if (err.error === "AuthFactorTokenRequired") {
127
state.requires2FA = true;
128
-
throw new Error("Two-factor authentication required. Please enter the code sent to your email.");
129
}
130
throw e;
131
}
···
143
}
144
}
145
146
-
async function authenticateToLocal(email: string, password: string): Promise<void> {
147
if (!localClient) {
148
localClient = createLocalClient();
149
}
···
151
}
152
153
async function startMigration(): Promise<void> {
154
if (!sourceClient || !state.sourceAccessToken) {
155
throw new Error("Not logged in to source PDS");
156
}
157
···
163
setProgress({ currentOperation: "Getting service auth token..." });
164
165
try {
166
const serverInfo = await loadLocalServerInfo();
167
const { token } = await sourceClient.getServiceAuth(
168
serverInfo.did,
169
"com.atproto.server.createAccount",
170
);
171
state.serviceAuthToken = token;
172
173
setProgress({ currentOperation: "Creating account on new PDS..." });
···
180
inviteCode: state.inviteCode || undefined,
181
};
182
183
const session = await localClient.createAccount(accountParams, token);
184
localClient.setAccessToken(session.accessJwt);
185
186
setProgress({ currentOperation: "Exporting repository..." });
187
-
188
const car = await sourceClient.getRepo(state.sourceDid);
189
-
setProgress({ repoExported: true, currentOperation: "Importing repository..." });
190
191
await localClient.importRepo(car);
192
-
setProgress({ repoImported: true, currentOperation: "Counting blobs..." });
193
194
const accountStatus = await localClient.checkAccountStatus();
195
setProgress({
196
blobsTotal: accountStatus.expectedBlobs,
197
currentOperation: "Migrating blobs...",
···
202
setProgress({ currentOperation: "Migrating preferences..." });
203
await migratePreferences();
204
205
setStep("email-verify");
206
} catch (e) {
207
const err = e as Error & { error?: string; status?: number };
208
-
const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`;
209
setError(message);
210
setStep("error");
211
}
···
226
for (const blob of blobs) {
227
try {
228
setProgress({
229
-
currentOperation: `Migrating blob ${migrated + 1}/${state.progress.blobsTotal}...`,
230
});
231
232
-
const blobData = await sourceClient.getBlob(state.sourceDid, blob.cid);
233
await localClient.uploadBlob(blobData, "application/octet-stream");
234
migrated++;
235
setProgress({ blobsMigrated: migrated });
···
253
}
254
}
255
256
-
async function submitEmailVerifyToken(token: string, localPassword?: string): Promise<void> {
257
if (!localClient) {
258
localClient = createLocalClient();
259
}
···
266
267
if (!sourceClient) {
268
setStep("source-login");
269
-
setError("Email verified! Please log in to your old account again to complete the migration.");
270
return;
271
}
272
···
285
setStep("plc-token");
286
} catch (e) {
287
const err = e as Error & { error?: string; status?: number };
288
-
const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`;
289
setError(message);
290
}
291
}
···
305
306
checkingEmailVerification = true;
307
try {
308
-
await localClient.loginDeactivated(state.targetEmail, state.targetPassword);
309
await sourceClient.requestPlcOperationSignature();
310
setStep("plc-token");
311
return true;
···
321
}
322
323
async function submitPlcToken(token: string): Promise<void> {
324
if (!sourceClient || !localClient) {
325
throw new Error("Not connected to PDSes");
326
}
327
···
330
setProgress({ currentOperation: "Signing PLC operation..." });
331
332
try {
333
const credentials = await localClient.getRecommendedDidCredentials();
334
335
const { operation } = await sourceClient.signPlcOperation({
336
token,
337
...credentials,
338
});
339
340
-
setProgress({ plcSigned: true, currentOperation: "Submitting PLC operation..." });
341
await localClient.submitPlcOperation(operation);
342
343
-
setProgress({ currentOperation: "Activating account (waiting for DID propagation)..." });
344
await localClient.activateAccount();
345
setProgress({ activated: true });
346
347
setProgress({ currentOperation: "Deactivating old account..." });
348
try {
349
await sourceClient.deactivateAccount();
350
setProgress({ deactivated: true });
351
-
} catch {
352
}
353
354
setStep("success");
355
clearMigrationState();
356
} catch (e) {
357
const err = e as Error & { error?: string; status?: number };
358
-
const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`;
359
state.step = "plc-token";
360
state.error = message;
361
saveMigrationState(state);
···
418
state.step = "source-login";
419
}
420
421
-
function getLocalSession(): { accessJwt: string; did: string; handle: string } | null {
422
if (!localClient) return null;
423
const token = localClient.getAccessToken();
424
if (!token) return null;
···
430
}
431
432
return {
433
-
get state() { return state; },
434
setStep,
435
setError,
436
loadLocalServerInfo,
···
513
}
514
}
515
516
-
function initLocalClient(accessToken: string, did?: string, handle?: string): void {
517
localClient = createLocalClient();
518
localClient.setAccessToken(accessToken);
519
if (did) {
···
557
setProgress({ currentOperation: "Exporting repository..." });
558
559
const car = await localClient.getRepo(currentDid);
560
-
setProgress({ repoExported: true, currentOperation: "Importing repository..." });
561
562
await targetClient.importRepo(car);
563
-
setProgress({ repoImported: true, currentOperation: "Counting blobs..." });
564
565
const accountStatus = await targetClient.checkAccountStatus();
566
setProgress({
···
579
setStep("plc-token");
580
} catch (e) {
581
const err = e as Error & { error?: string; status?: number };
582
-
const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`;
583
setError(message);
584
setStep("error");
585
}
···
600
for (const blob of blobs) {
601
try {
602
setProgress({
603
-
currentOperation: `Migrating blob ${migrated + 1}/${state.progress.blobsTotal}...`,
604
});
605
606
const blobData = await localClient.getBlob(did, blob.cid);
···
644
...credentials,
645
});
646
647
-
setProgress({ plcSigned: true, currentOperation: "Submitting PLC operation..." });
648
649
await targetClient.submitPlcOperation(operation);
650
···
660
}
661
662
if (state.localDid.startsWith("did:web:")) {
663
-
setProgress({ currentOperation: "Updating DID document forwarding..." });
664
try {
665
await localClient.updateMigrationForwarding(state.targetPdsUrl);
666
} catch (e) {
···
672
clearMigrationState();
673
} catch (e) {
674
const err = e as Error & { error?: string; status?: number };
675
-
const message = err.message || err.error || `Unknown error (status ${err.status || 'unknown'})`;
676
setError(message);
677
setStep("plc-token");
678
}
···
711
}
712
713
return {
714
-
get state() { return state; },
715
setStep,
716
setError,
717
validateTargetPds,
···
730
};
731
}
732
733
-
export type InboundMigrationFlow = ReturnType<typeof createInboundMigrationFlow>;
734
-
export type OutboundMigrationFlow = ReturnType<typeof createOutboundMigrationFlow>;
···
20
updateStep,
21
} from "./storage";
22
23
+
function migrationLog(stage: string, data?: Record<string, unknown>) {
24
+
const timestamp = new Date().toISOString();
25
+
const msg = `[MIGRATION ${timestamp}] ${stage}`;
26
+
if (data) {
27
+
console.log(msg, JSON.stringify(data, null, 2));
28
+
} else {
29
+
console.log(msg);
30
+
}
31
+
}
32
+
33
function createInitialProgress(): MigrationProgress {
34
return {
35
repoExported: false,
···
115
password: string,
116
twoFactorCode?: string,
117
): Promise<void> {
118
+
migrationLog("loginToSource START", { handle, has2FA: !!twoFactorCode });
119
+
120
if (!state.sourcePdsUrl) {
121
await resolveSourcePds(handle);
122
}
···
126
}
127
128
try {
129
+
migrationLog("loginToSource: Calling createSession on OLD PDS", {
130
+
pdsUrl: state.sourcePdsUrl,
131
+
});
132
const session = await sourceClient.login(handle, password, twoFactorCode);
133
+
migrationLog("loginToSource SUCCESS", {
134
+
did: session.did,
135
+
handle: session.handle,
136
+
pdsUrl: state.sourcePdsUrl,
137
+
});
138
state.sourceAccessToken = session.accessJwt;
139
state.sourceRefreshToken = session.refreshJwt;
140
state.sourceDid = session.did;
···
143
saveMigrationState(state);
144
} catch (e) {
145
const err = e as Error & { error?: string };
146
+
migrationLog("loginToSource FAILED", {
147
+
error: err.message,
148
+
errorCode: err.error,
149
+
});
150
if (err.error === "AuthFactorTokenRequired") {
151
state.requires2FA = true;
152
+
throw new Error(
153
+
"Two-factor authentication required. Please enter the code sent to your email.",
154
+
);
155
}
156
throw e;
157
}
···
169
}
170
}
171
172
+
async function authenticateToLocal(
173
+
email: string,
174
+
password: string,
175
+
): Promise<void> {
176
if (!localClient) {
177
localClient = createLocalClient();
178
}
···
180
}
181
182
async function startMigration(): Promise<void> {
183
+
migrationLog("startMigration START", {
184
+
sourceDid: state.sourceDid,
185
+
sourceHandle: state.sourceHandle,
186
+
targetHandle: state.targetHandle,
187
+
sourcePdsUrl: state.sourcePdsUrl,
188
+
});
189
+
190
if (!sourceClient || !state.sourceAccessToken) {
191
+
migrationLog("startMigration ERROR: Not logged in to source PDS");
192
throw new Error("Not logged in to source PDS");
193
}
194
···
200
setProgress({ currentOperation: "Getting service auth token..." });
201
202
try {
203
+
migrationLog("startMigration: Loading local server info");
204
const serverInfo = await loadLocalServerInfo();
205
+
migrationLog("startMigration: Got server info", {
206
+
serverDid: serverInfo.did,
207
+
});
208
+
209
+
migrationLog("startMigration: Getting service auth token from OLD PDS");
210
const { token } = await sourceClient.getServiceAuth(
211
serverInfo.did,
212
"com.atproto.server.createAccount",
213
);
214
+
migrationLog("startMigration: Got service auth token");
215
state.serviceAuthToken = token;
216
217
setProgress({ currentOperation: "Creating account on new PDS..." });
···
224
inviteCode: state.inviteCode || undefined,
225
};
226
227
+
migrationLog("startMigration: Creating account on NEW PDS", {
228
+
did: accountParams.did,
229
+
handle: accountParams.handle,
230
+
});
231
const session = await localClient.createAccount(accountParams, token);
232
+
migrationLog("startMigration: Account created on NEW PDS", {
233
+
did: session.did,
234
+
});
235
localClient.setAccessToken(session.accessJwt);
236
237
setProgress({ currentOperation: "Exporting repository..." });
238
+
migrationLog("startMigration: Exporting repo from OLD PDS");
239
+
const exportStart = Date.now();
240
const car = await sourceClient.getRepo(state.sourceDid);
241
+
migrationLog("startMigration: Repo exported", {
242
+
durationMs: Date.now() - exportStart,
243
+
sizeBytes: car.byteLength,
244
+
});
245
+
setProgress({
246
+
repoExported: true,
247
+
currentOperation: "Importing repository...",
248
+
});
249
250
+
migrationLog("startMigration: Importing repo to NEW PDS");
251
+
const importStart = Date.now();
252
await localClient.importRepo(car);
253
+
migrationLog("startMigration: Repo imported", {
254
+
durationMs: Date.now() - importStart,
255
+
});
256
+
setProgress({
257
+
repoImported: true,
258
+
currentOperation: "Counting blobs...",
259
+
});
260
261
const accountStatus = await localClient.checkAccountStatus();
262
+
migrationLog("startMigration: Account status", {
263
+
expectedBlobs: accountStatus.expectedBlobs,
264
+
importedBlobs: accountStatus.importedBlobs,
265
+
});
266
setProgress({
267
blobsTotal: accountStatus.expectedBlobs,
268
currentOperation: "Migrating blobs...",
···
273
setProgress({ currentOperation: "Migrating preferences..." });
274
await migratePreferences();
275
276
+
migrationLog(
277
+
"startMigration: Initial migration complete, waiting for email verification",
278
+
);
279
setStep("email-verify");
280
} catch (e) {
281
const err = e as Error & { error?: string; status?: number };
282
+
const message = err.message || err.error ||
283
+
`Unknown error (status ${err.status || "unknown"})`;
284
+
migrationLog("startMigration FAILED", {
285
+
error: message,
286
+
errorCode: err.error,
287
+
status: err.status,
288
+
stack: err.stack,
289
+
});
290
setError(message);
291
setStep("error");
292
}
···
307
for (const blob of blobs) {
308
try {
309
setProgress({
310
+
currentOperation: `Migrating blob ${
311
+
migrated + 1
312
+
}/${state.progress.blobsTotal}...`,
313
});
314
315
+
const blobData = await sourceClient.getBlob(
316
+
state.sourceDid,
317
+
blob.cid,
318
+
);
319
await localClient.uploadBlob(blobData, "application/octet-stream");
320
migrated++;
321
setProgress({ blobsMigrated: migrated });
···
339
}
340
}
341
342
+
async function submitEmailVerifyToken(
343
+
token: string,
344
+
localPassword?: string,
345
+
): Promise<void> {
346
if (!localClient) {
347
localClient = createLocalClient();
348
}
···
355
356
if (!sourceClient) {
357
setStep("source-login");
358
+
setError(
359
+
"Email verified! Please log in to your old account again to complete the migration.",
360
+
);
361
return;
362
}
363
···
376
setStep("plc-token");
377
} catch (e) {
378
const err = e as Error & { error?: string; status?: number };
379
+
const message = err.message || err.error ||
380
+
`Unknown error (status ${err.status || "unknown"})`;
381
setError(message);
382
}
383
}
···
397
398
checkingEmailVerification = true;
399
try {
400
+
await localClient.loginDeactivated(
401
+
state.targetEmail,
402
+
state.targetPassword,
403
+
);
404
await sourceClient.requestPlcOperationSignature();
405
setStep("plc-token");
406
return true;
···
416
}
417
418
async function submitPlcToken(token: string): Promise<void> {
419
+
migrationLog("submitPlcToken START", {
420
+
sourceDid: state.sourceDid,
421
+
sourceHandle: state.sourceHandle,
422
+
targetHandle: state.targetHandle,
423
+
sourcePdsUrl: state.sourcePdsUrl,
424
+
});
425
+
426
if (!sourceClient || !localClient) {
427
+
migrationLog("submitPlcToken ERROR: Not connected to PDSes", {
428
+
hasSourceClient: !!sourceClient,
429
+
hasLocalClient: !!localClient,
430
+
});
431
throw new Error("Not connected to PDSes");
432
}
433
···
436
setProgress({ currentOperation: "Signing PLC operation..." });
437
438
try {
439
+
migrationLog("Step 1: Getting recommended DID credentials from NEW PDS");
440
const credentials = await localClient.getRecommendedDidCredentials();
441
+
migrationLog("Step 1 COMPLETE: Got credentials", {
442
+
rotationKeys: credentials.rotationKeys,
443
+
alsoKnownAs: credentials.alsoKnownAs,
444
+
verificationMethods: credentials.verificationMethods,
445
+
services: credentials.services,
446
+
});
447
448
+
migrationLog("Step 2: Signing PLC operation on OLD PDS", {
449
+
sourcePdsUrl: state.sourcePdsUrl,
450
+
});
451
+
const signStart = Date.now();
452
const { operation } = await sourceClient.signPlcOperation({
453
token,
454
...credentials,
455
});
456
+
migrationLog("Step 2 COMPLETE: PLC operation signed", {
457
+
durationMs: Date.now() - signStart,
458
+
operationType: operation.type,
459
+
operationPrev: operation.prev,
460
+
});
461
462
+
setProgress({
463
+
plcSigned: true,
464
+
currentOperation: "Submitting PLC operation...",
465
+
});
466
+
migrationLog("Step 3: Submitting PLC operation to NEW PDS");
467
+
const submitStart = Date.now();
468
await localClient.submitPlcOperation(operation);
469
+
migrationLog("Step 3 COMPLETE: PLC operation submitted", {
470
+
durationMs: Date.now() - submitStart,
471
+
});
472
473
+
setProgress({
474
+
currentOperation: "Activating account (waiting for DID propagation)...",
475
+
});
476
+
migrationLog("Step 4: Activating account on NEW PDS");
477
+
const activateStart = Date.now();
478
await localClient.activateAccount();
479
+
migrationLog("Step 4 COMPLETE: Account activated on NEW PDS", {
480
+
durationMs: Date.now() - activateStart,
481
+
});
482
setProgress({ activated: true });
483
484
setProgress({ currentOperation: "Deactivating old account..." });
485
+
migrationLog("Step 5: Deactivating account on OLD PDS", {
486
+
sourcePdsUrl: state.sourcePdsUrl,
487
+
});
488
+
const deactivateStart = Date.now();
489
try {
490
await sourceClient.deactivateAccount();
491
+
migrationLog("Step 5 COMPLETE: Account deactivated on OLD PDS", {
492
+
durationMs: Date.now() - deactivateStart,
493
+
success: true,
494
+
});
495
setProgress({ deactivated: true });
496
+
} catch (deactivateErr) {
497
+
const err = deactivateErr as Error & {
498
+
error?: string;
499
+
status?: number;
500
+
};
501
+
migrationLog("Step 5 FAILED: Could not deactivate on OLD PDS", {
502
+
durationMs: Date.now() - deactivateStart,
503
+
error: err.message,
504
+
errorCode: err.error,
505
+
status: err.status,
506
+
});
507
}
508
509
+
migrationLog("submitPlcToken SUCCESS: Migration complete", {
510
+
sourceDid: state.sourceDid,
511
+
newHandle: state.targetHandle,
512
+
});
513
setStep("success");
514
clearMigrationState();
515
} catch (e) {
516
const err = e as Error & { error?: string; status?: number };
517
+
const message = err.message || err.error ||
518
+
`Unknown error (status ${err.status || "unknown"})`;
519
+
migrationLog("submitPlcToken FAILED", {
520
+
error: message,
521
+
errorCode: err.error,
522
+
status: err.status,
523
+
stack: err.stack,
524
+
});
525
state.step = "plc-token";
526
state.error = message;
527
saveMigrationState(state);
···
584
state.step = "source-login";
585
}
586
587
+
function getLocalSession():
588
+
| { accessJwt: string; did: string; handle: string }
589
+
| null {
590
if (!localClient) return null;
591
const token = localClient.getAccessToken();
592
if (!token) return null;
···
598
}
599
600
return {
601
+
get state() {
602
+
return state;
603
+
},
604
setStep,
605
setError,
606
loadLocalServerInfo,
···
683
}
684
}
685
686
+
function initLocalClient(
687
+
accessToken: string,
688
+
did?: string,
689
+
handle?: string,
690
+
): void {
691
localClient = createLocalClient();
692
localClient.setAccessToken(accessToken);
693
if (did) {
···
731
setProgress({ currentOperation: "Exporting repository..." });
732
733
const car = await localClient.getRepo(currentDid);
734
+
setProgress({
735
+
repoExported: true,
736
+
currentOperation: "Importing repository...",
737
+
});
738
739
await targetClient.importRepo(car);
740
+
setProgress({
741
+
repoImported: true,
742
+
currentOperation: "Counting blobs...",
743
+
});
744
745
const accountStatus = await targetClient.checkAccountStatus();
746
setProgress({
···
759
setStep("plc-token");
760
} catch (e) {
761
const err = e as Error & { error?: string; status?: number };
762
+
const message = err.message || err.error ||
763
+
`Unknown error (status ${err.status || "unknown"})`;
764
setError(message);
765
setStep("error");
766
}
···
781
for (const blob of blobs) {
782
try {
783
setProgress({
784
+
currentOperation: `Migrating blob ${
785
+
migrated + 1
786
+
}/${state.progress.blobsTotal}...`,
787
});
788
789
const blobData = await localClient.getBlob(did, blob.cid);
···
827
...credentials,
828
});
829
830
+
setProgress({
831
+
plcSigned: true,
832
+
currentOperation: "Submitting PLC operation...",
833
+
});
834
835
await targetClient.submitPlcOperation(operation);
836
···
846
}
847
848
if (state.localDid.startsWith("did:web:")) {
849
+
setProgress({
850
+
currentOperation: "Updating DID document forwarding...",
851
+
});
852
try {
853
await localClient.updateMigrationForwarding(state.targetPdsUrl);
854
} catch (e) {
···
860
clearMigrationState();
861
} catch (e) {
862
const err = e as Error & { error?: string; status?: number };
863
+
const message = err.message || err.error ||
864
+
`Unknown error (status ${err.status || "unknown"})`;
865
setError(message);
866
setStep("plc-token");
867
}
···
900
}
901
902
return {
903
+
get state() {
904
+
return state;
905
+
},
906
setStep,
907
setError,
908
validateTargetPds,
···
921
};
922
}
923
924
+
export type InboundMigrationFlow = ReturnType<
925
+
typeof createInboundMigrationFlow
926
+
>;
927
+
export type OutboundMigrationFlow = ReturnType<
928
+
typeof createOutboundMigrationFlow
929
+
>;
+11 -3 frontend/src/lib/migration/storage.ts
···
1
-
import type { MigrationDirection, MigrationState, StoredMigrationState } from "./types";
2
3
const STORAGE_KEY = "tranquil_migration_state";
4
const MAX_AGE_MS = 24 * 60 * 60 * 1000;
···
9
direction: state.direction,
10
step: state.direction === "inbound" ? state.step : state.step,
11
startedAt: new Date().toISOString(),
12
-
sourcePdsUrl: state.direction === "inbound" ? state.sourcePdsUrl : window.location.origin,
13
-
targetPdsUrl: state.direction === "inbound" ? window.location.origin : state.targetPdsUrl,
14
sourceDid: state.direction === "inbound" ? state.sourceDid : "",
15
sourceHandle: state.direction === "inbound" ? state.sourceHandle : "",
16
targetHandle: state.targetHandle,
···
1
+
import type {
2
+
MigrationDirection,
3
+
MigrationState,
4
+
StoredMigrationState,
5
+
} from "./types";
6
7
const STORAGE_KEY = "tranquil_migration_state";
8
const MAX_AGE_MS = 24 * 60 * 60 * 1000;
···
13
direction: state.direction,
14
step: state.direction === "inbound" ? state.step : state.step,
15
startedAt: new Date().toISOString(),
16
+
sourcePdsUrl: state.direction === "inbound"
17
+
? state.sourcePdsUrl
18
+
: window.location.origin,
19
+
targetPdsUrl: state.direction === "inbound"
20
+
? window.location.origin
21
+
: state.targetPdsUrl,
22
sourceDid: state.direction === "inbound" ? state.sourceDid : "",
23
sourceHandle: state.direction === "inbound" ? state.sourceHandle : "",
24
targetHandle: state.targetHandle,
+4 -5 src/api/delegation.rs
···
468
auth: BearerAuth,
469
Query(params): Query<AuditLogParams>,
470
) -> Response {
471
-
let limit = params.limit.min(100).max(1);
472
let offset = params.offset.max(0);
473
474
let entries =
···
489
}
490
};
491
492
-
let total = match delegation::audit::count_audit_log_entries(&state.db, &auth.0.did).await {
493
-
Ok(t) => t,
494
-
Err(_) => 0,
495
-
};
496
497
Json(GetAuditLogResponse {
498
entries: entries
···
468
auth: BearerAuth,
469
Query(params): Query<AuditLogParams>,
470
) -> Response {
471
+
let limit = params.limit.clamp(1, 100);
472
let offset = params.offset.max(0);
473
474
let entries =
···
489
}
490
};
491
492
+
let total = delegation::audit::count_audit_log_entries(&state.db, &auth.0.did)
493
+
.await
494
+
.unwrap_or_default();
495
496
Json(GetAuditLogResponse {
497
entries: entries
+71 -27 src/api/identity/account.rs
···
69
headers: HeaderMap,
70
Json(input): Json<CreateAccountInput>,
71
) -> Response {
72
-
info!("create_account called");
73
let client_ip = extract_client_ip(&headers);
74
if !state
75
.check_rate_limit(RateLimitKind::AccountCreation, &client_ip)
···
136
&& let (Some(provided_did), Some(auth_did)) = (input.did.as_ref(), migration_auth.as_ref())
137
{
138
if provided_did != auth_did {
139
return (
140
StatusCode::FORBIDDEN,
141
Json(json!({
···
148
if is_did_web_byod {
149
info!(did = %provided_did, "Processing did:web BYOD account creation");
150
} else {
151
-
info!(did = %provided_did, "Processing account migration");
152
}
153
}
154
···
1005
}
1006
1007
let (access_jwt, refresh_jwt) = if is_migration {
1008
-
let access_meta =
1009
-
match crate::auth::create_access_token_with_metadata(&did, &secret_key_bytes) {
1010
-
Ok(m) => m,
1011
-
Err(e) => {
1012
-
error!("Error creating access token for migration: {:?}", e);
1013
-
return (
1014
-
StatusCode::INTERNAL_SERVER_ERROR,
1015
-
Json(json!({"error": "InternalError"})),
1016
-
)
1017
-
.into_response();
1018
-
}
1019
-
};
1020
-
let refresh_meta =
1021
-
match crate::auth::create_refresh_token_with_metadata(&did, &secret_key_bytes) {
1022
-
Ok(m) => m,
1023
-
Err(e) => {
1024
-
error!("Error creating refresh token for migration: {:?}", e);
1025
-
return (
1026
-
StatusCode::INTERNAL_SERVER_ERROR,
1027
-
Json(json!({"error": "InternalError"})),
1028
-
)
1029
-
.into_response();
1030
-
}
1031
-
};
1032
if let Err(e) = sqlx::query!(
1033
"INSERT INTO session_tokens (did, access_jti, refresh_jti, access_expires_at, refresh_expires_at) VALUES ($1, $2, $3, $4, $5)",
1034
did,
···
1040
.execute(&state.db)
1041
.await
1042
{
1043
-
error!("Error creating session for migration: {:?}", e);
1044
return (
1045
StatusCode::INTERNAL_SERVER_ERROR,
1046
Json(json!({"error": "InternalError"})),
1047
)
1048
.into_response();
1049
}
1050
(Some(access_meta.token), Some(refresh_meta.token))
1051
} else {
1052
(None, None)
1053
};
1054
1055
(
1056
StatusCode::OK,
···
69
headers: HeaderMap,
70
Json(input): Json<CreateAccountInput>,
71
) -> Response {
72
+
let is_potential_migration = input
73
+
.did
74
+
.as_ref()
75
+
.map(|d| d.starts_with("did:plc:"))
76
+
.unwrap_or(false);
77
+
if is_potential_migration {
78
+
info!(
79
+
"[MIGRATION] createAccount called for potential migration did={:?} handle={}",
80
+
input.did, input.handle
81
+
);
82
+
} else {
83
+
info!("create_account called");
84
+
}
85
let client_ip = extract_client_ip(&headers);
86
if !state
87
.check_rate_limit(RateLimitKind::AccountCreation, &client_ip)
···
148
&& let (Some(provided_did), Some(auth_did)) = (input.did.as_ref(), migration_auth.as_ref())
149
{
150
if provided_did != auth_did {
151
+
info!(
152
+
"[MIGRATION] createAccount: Service token mismatch - token_did={} provided_did={}",
153
+
auth_did, provided_did
154
+
);
155
return (
156
StatusCode::FORBIDDEN,
157
Json(json!({
···
164
if is_did_web_byod {
165
info!(did = %provided_did, "Processing did:web BYOD account creation");
166
} else {
167
+
info!(
168
+
"[MIGRATION] createAccount: Service token verified, processing migration for did={}",
169
+
provided_did
170
+
);
171
}
172
}
173
···
1024
}
1025
1026
let (access_jwt, refresh_jwt) = if is_migration {
1027
+
info!(
1028
+
"[MIGRATION] createAccount: Creating session tokens for migration did={}",
1029
+
did
1030
+
);
1031
+
let access_meta = match crate::auth::create_access_token_with_metadata(
1032
+
&did,
1033
+
&secret_key_bytes,
1034
+
) {
1035
+
Ok(m) => m,
1036
+
Err(e) => {
1037
+
error!(
1038
+
"[MIGRATION] createAccount: Error creating access token for migration: {:?}",
1039
+
e
1040
+
);
1041
+
return (
1042
+
StatusCode::INTERNAL_SERVER_ERROR,
1043
+
Json(json!({"error": "InternalError"})),
1044
+
)
1045
+
.into_response();
1046
+
}
1047
+
};
1048
+
let refresh_meta = match crate::auth::create_refresh_token_with_metadata(
1049
+
&did,
1050
+
&secret_key_bytes,
1051
+
) {
1052
+
Ok(m) => m,
1053
+
Err(e) => {
1054
+
error!(
1055
+
"[MIGRATION] createAccount: Error creating refresh token for migration: {:?}",
1056
+
e
1057
+
);
1058
+
return (
1059
+
StatusCode::INTERNAL_SERVER_ERROR,
1060
+
Json(json!({"error": "InternalError"})),
1061
+
)
1062
+
.into_response();
1063
+
}
1064
+
};
1065
if let Err(e) = sqlx::query!(
1066
"INSERT INTO session_tokens (did, access_jti, refresh_jti, access_expires_at, refresh_expires_at) VALUES ($1, $2, $3, $4, $5)",
1067
did,
···
1073
.execute(&state.db)
1074
.await
1075
{
1076
+
error!("[MIGRATION] createAccount: Error creating session for migration: {:?}", e);
1077
return (
1078
StatusCode::INTERNAL_SERVER_ERROR,
1079
Json(json!({"error": "InternalError"})),
1080
)
1081
.into_response();
1082
}
1083
+
info!(
1084
+
"[MIGRATION] createAccount: Session created successfully for did={}",
1085
+
did
1086
+
);
1087
(Some(access_meta.token), Some(refresh_meta.token))
1088
} else {
1089
(None, None)
1090
};
1091
+
1092
+
if is_migration {
1093
+
info!(
1094
+
"[MIGRATION] createAccount: SUCCESS - Account ready for migration did={} handle={}",
1095
+
did, handle
1096
+
);
1097
+
}
1098
1099
(
1100
StatusCode::OK,
+50 -8 src/api/identity/plc/submit.rs
···
23
headers: axum::http::HeaderMap,
24
Json(input): Json<SubmitPlcOperationInput>,
25
) -> Response {
26
let bearer = match crate::auth::extract_bearer_token_from_header(
27
headers.get("Authorization").and_then(|h| h.to_str().ok()),
28
) {
29
Some(t) => t,
30
-
None => return ApiError::AuthenticationRequired.into_response(),
31
};
32
let auth_user =
33
match crate::auth::validate_bearer_token_allow_deactivated(&state.db, &bearer).await {
34
Ok(user) => user,
35
-
Err(e) => return ApiError::from(e).into_response(),
36
};
37
if let Err(e) = crate::auth::scope_check::check_identity_scope(
38
auth_user.is_oauth,
39
auth_user.scope.as_deref(),
40
crate::oauth::scopes::IdentityAttr::Wildcard,
41
) {
42
return e;
43
}
44
let did = &auth_user.did;
···
188
let plc_client = PlcClient::new(None);
189
let operation_clone = input.operation.clone();
190
let did_clone = did.clone();
191
let result: Result<(), CircuitBreakerError<PlcError>> =
192
with_circuit_breaker(&state.circuit_breakers.plc_directory, || async {
193
plc_client
···
196
})
197
.await;
198
match result {
199
-
Ok(()) => {}
200
Err(CircuitBreakerError::CircuitOpen(e)) => {
201
-
warn!("PLC directory circuit breaker open: {}", e);
202
return (
203
StatusCode::SERVICE_UNAVAILABLE,
204
Json(json!({
···
209
.into_response();
210
}
211
Err(CircuitBreakerError::OperationFailed(e)) => {
212
-
error!("Failed to submit PLC operation: {:?}", e);
213
return (
214
StatusCode::BAD_GATEWAY,
215
Json(json!({
···
220
.into_response();
221
}
222
}
223
match sqlx::query!(
224
"INSERT INTO repo_seq (did, event_type) VALUES ($1, 'identity') RETURNING seq",
225
did
···
228
.await
229
{
230
Ok(row) => {
231
if let Err(e) = sqlx::query(&format!("NOTIFY repo_updates, '{}'", row.seq))
232
.execute(&state.db)
233
.await
234
{
235
-
warn!("Failed to notify identity event: {:?}", e);
236
}
237
}
238
Err(e) => {
239
-
warn!("Failed to sequence identity event: {:?}", e);
240
}
241
}
242
-
info!("Submitted PLC operation for user {}", did);
243
(StatusCode::OK, Json(json!({}))).into_response()
244
}
···
23
headers: axum::http::HeaderMap,
24
Json(input): Json<SubmitPlcOperationInput>,
25
) -> Response {
26
+
info!("[MIGRATION] submitPlcOperation called");
27
let bearer = match crate::auth::extract_bearer_token_from_header(
28
headers.get("Authorization").and_then(|h| h.to_str().ok()),
29
) {
30
Some(t) => t,
31
+
None => {
32
+
info!("[MIGRATION] submitPlcOperation: No bearer token");
33
+
return ApiError::AuthenticationRequired.into_response();
34
+
}
35
};
36
let auth_user =
37
match crate::auth::validate_bearer_token_allow_deactivated(&state.db, &bearer).await {
38
Ok(user) => user,
39
+
Err(e) => {
40
+
info!("[MIGRATION] submitPlcOperation: Auth failed: {:?}", e);
41
+
return ApiError::from(e).into_response();
42
+
}
43
};
44
+
info!(
45
+
"[MIGRATION] submitPlcOperation: Authenticated user did={}",
46
+
auth_user.did
47
+
);
48
if let Err(e) = crate::auth::scope_check::check_identity_scope(
49
auth_user.is_oauth,
50
auth_user.scope.as_deref(),
51
crate::oauth::scopes::IdentityAttr::Wildcard,
52
) {
53
+
info!("[MIGRATION] submitPlcOperation: Scope check failed");
54
return e;
55
}
56
let did = &auth_user.did;
···
200
let plc_client = PlcClient::new(None);
201
let operation_clone = input.operation.clone();
202
let did_clone = did.clone();
203
+
info!(
204
+
"[MIGRATION] submitPlcOperation: Sending operation to PLC directory for did={}",
205
+
did
206
+
);
207
+
let plc_start = std::time::Instant::now();
208
let result: Result<(), CircuitBreakerError<PlcError>> =
209
with_circuit_breaker(&state.circuit_breakers.plc_directory, || async {
210
plc_client
···
213
})
214
.await;
215
match result {
216
+
Ok(()) => {
217
+
info!(
218
+
"[MIGRATION] submitPlcOperation: PLC directory accepted operation in {:?}",
219
+
plc_start.elapsed()
220
+
);
221
+
}
222
Err(CircuitBreakerError::CircuitOpen(e)) => {
223
+
warn!(
224
+
"[MIGRATION] submitPlcOperation: PLC directory circuit breaker open: {}",
225
+
e
226
+
);
227
return (
228
StatusCode::SERVICE_UNAVAILABLE,
229
Json(json!({
···
234
.into_response();
235
}
236
Err(CircuitBreakerError::OperationFailed(e)) => {
237
+
error!(
238
+
"[MIGRATION] submitPlcOperation: PLC operation failed: {:?}",
239
+
e
240
+
);
241
return (
242
StatusCode::BAD_GATEWAY,
243
Json(json!({
···
248
.into_response();
249
}
250
}
251
+
info!(
252
+
"[MIGRATION] submitPlcOperation: Sequencing identity event for did={}",
253
+
did
254
+
);
255
match sqlx::query!(
256
"INSERT INTO repo_seq (did, event_type) VALUES ($1, 'identity') RETURNING seq",
257
did
···
260
.await
261
{
262
Ok(row) => {
263
+
info!(
264
+
"[MIGRATION] submitPlcOperation: Identity event sequenced with seq={}",
265
+
row.seq
266
+
);
267
if let Err(e) = sqlx::query(&format!("NOTIFY repo_updates, '{}'", row.seq))
268
.execute(&state.db)
269
.await
270
{
271
+
warn!(
272
+
"[MIGRATION] submitPlcOperation: Failed to notify identity event: {:?}",
273
+
e
274
+
);
275
}
276
}
277
Err(e) => {
278
+
warn!(
279
+
"[MIGRATION] submitPlcOperation: Failed to sequence identity event: {:?}",
280
+
e
281
+
);
282
}
283
}
284
+
info!("[MIGRATION] submitPlcOperation: SUCCESS for did={}", did);
285
(StatusCode::OK, Json(json!({}))).into_response()
286
}
+22 -36 src/api/repo/blob.rs
···
1
use crate::auth::{ServiceTokenVerifier, is_service_token};
2
use crate::delegation::{self, DelegationActionType};
3
use crate::state::AppState;
4
use axum::body::Bytes;
5
use axum::{
6
Json,
···
9
response::{IntoResponse, Response},
10
};
11
use cid::Cid;
12
use jacquard_repo::storage::BlockStore;
13
use multihash::Multihash;
14
use serde::{Deserialize, Serialize};
15
use serde_json::json;
16
use sha2::{Digest, Sha256};
17
use std::str::FromStr;
18
-
use tracing::{debug, error};
19
20
const MAX_BLOB_SIZE: usize = 10_000_000_000;
21
const MAX_VIDEO_BLOB_SIZE: usize = 10_000_000_000;
···
258
pub blobs: Vec<RecordBlob>,
259
}
260
261
-
fn find_blobs(val: &serde_json::Value, blobs: &mut Vec<String>) {
262
-
if let Some(obj) = val.as_object() {
263
-
if let Some(type_val) = obj.get("$type")
264
-
&& type_val == "blob"
265
-
&& let Some(r) = obj.get("ref")
266
-
&& let Some(link) = r.get("$link")
267
-
&& let Some(s) = link.as_str()
268
-
{
269
-
blobs.push(s.to_string());
270
-
}
271
-
for (_, v) in obj {
272
-
find_blobs(v, blobs);
273
-
}
274
-
} else if let Some(arr) = val.as_array() {
275
-
for v in arr {
276
-
find_blobs(v, blobs);
277
-
}
278
-
}
279
-
}
280
-
281
pub async fn list_missing_blobs(
282
State(state): State<AppState>,
283
headers: axum::http::HeaderMap,
···
295
.into_response();
296
}
297
};
298
-
let auth_user = match crate::auth::validate_bearer_token_allow_deactivated(&state.db, &token).await {
299
-
Ok(user) => user,
300
-
Err(_) => {
301
-
return (
302
-
StatusCode::UNAUTHORIZED,
303
-
Json(json!({"error": "AuthenticationFailed"})),
304
-
)
305
-
.into_response();
306
-
}
307
-
};
308
let did = auth_user.did;
309
let user_query = sqlx::query!("SELECT id FROM users WHERE did = $1", did)
310
.fetch_optional(&state.db)
···
362
Ok(Some(b)) => b,
363
_ => continue,
364
};
365
-
let record_val: serde_json::Value = match serde_ipld_dagcbor::from_slice(&block_bytes) {
366
Ok(v) => v,
367
-
Err(_) => continue,
368
};
369
-
let mut blobs = Vec::new();
370
-
find_blobs(&record_val, &mut blobs);
371
-
for blob_cid_str in blobs {
372
let exists = sqlx::query!(
373
"SELECT 1 as one FROM blobs WHERE cid = $1 AND created_by_user = $2",
374
blob_cid_str,
···
1
use crate::auth::{ServiceTokenVerifier, is_service_token};
2
use crate::delegation::{self, DelegationActionType};
3
use crate::state::AppState;
4
+
use crate::sync::import::find_blob_refs_ipld;
5
use axum::body::Bytes;
6
use axum::{
7
Json,
···
10
response::{IntoResponse, Response},
11
};
12
use cid::Cid;
13
+
use ipld_core::ipld::Ipld;
14
use jacquard_repo::storage::BlockStore;
15
use multihash::Multihash;
16
use serde::{Deserialize, Serialize};
17
use serde_json::json;
18
use sha2::{Digest, Sha256};
19
use std::str::FromStr;
20
+
use tracing::{debug, error, warn};
21
22
const MAX_BLOB_SIZE: usize = 10_000_000_000;
23
const MAX_VIDEO_BLOB_SIZE: usize = 10_000_000_000;
···
260
pub blobs: Vec<RecordBlob>,
261
}
262
263
pub async fn list_missing_blobs(
264
State(state): State<AppState>,
265
headers: axum::http::HeaderMap,
···
277
.into_response();
278
}
279
};
280
+
let auth_user =
281
+
match crate::auth::validate_bearer_token_allow_deactivated(&state.db, &token).await {
282
+
Ok(user) => user,
283
+
Err(_) => {
284
+
return (
285
+
StatusCode::UNAUTHORIZED,
286
+
Json(json!({"error": "AuthenticationFailed"})),
287
+
)
288
+
.into_response();
289
+
}
290
+
};
291
let did = auth_user.did;
292
let user_query = sqlx::query!("SELECT id FROM users WHERE did = $1", did)
293
.fetch_optional(&state.db)
···
345
Ok(Some(b)) => b,
346
_ => continue,
347
};
348
+
let record_ipld: Ipld = match serde_ipld_dagcbor::from_slice(&block_bytes) {
349
Ok(v) => v,
350
+
Err(e) => {
351
+
warn!("Failed to parse record {} as IPLD: {:?}", record_cid_str, e);
352
+
continue;
353
+
}
354
};
355
+
let blob_refs = find_blob_refs_ipld(&record_ipld, 0);
356
+
for blob_ref in blob_refs {
357
+
let blob_cid_str = blob_ref.cid;
358
let exists = sqlx::query!(
359
"SELECT 1 as one FROM blobs WHERE cid = $1 AND created_by_user = $2",
360
blob_cid_str,
+15 -15 src/api/repo/import.rs
···
350
.into_response();
351
}
352
};
353
-
let key_bytes = match crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version) {
354
-
Ok(k) => k,
355
-
Err(e) => {
356
-
error!("Failed to decrypt signing key: {}", e);
357
-
return (
358
-
StatusCode::INTERNAL_SERVER_ERROR,
359
-
Json(json!({"error": "InternalError"})),
360
-
)
361
-
.into_response();
362
-
}
363
-
};
364
let signing_key = match SigningKey::from_slice(&key_bytes) {
365
Ok(k) => k,
366
Err(e) => {
···
422
"Created new commit for imported repo: cid={}, rev={}",
423
new_root_str, new_rev_str
424
);
425
-
if !is_migration {
426
-
if let Err(e) = sequence_import_event(&state, did, &new_root_str).await {
427
-
warn!("Failed to sequence import event: {:?}", e);
428
-
}
429
}
430
(StatusCode::OK, Json(json!({}))).into_response()
431
}
···
350
.into_response();
351
}
352
};
353
+
let key_bytes =
354
+
match crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version) {
355
+
Ok(k) => k,
356
+
Err(e) => {
357
+
error!("Failed to decrypt signing key: {}", e);
358
+
return (
359
+
StatusCode::INTERNAL_SERVER_ERROR,
360
+
Json(json!({"error": "InternalError"})),
361
+
)
362
+
.into_response();
363
+
}
364
+
};
365
let signing_key = match SigningKey::from_slice(&key_bytes) {
366
Ok(k) => k,
367
Err(e) => {
···
423
"Created new commit for imported repo: cid={}, rev={}",
424
new_root_str, new_rev_str
425
);
426
+
if !is_migration && let Err(e) = sequence_import_event(&state, did, &new_root_str).await
427
+
{
428
+
warn!("Failed to sequence import event: {:?}", e);
429
}
430
(StatusCode::OK, Json(json!({}))).into_response()
431
}
+3 -5 src/api/repo/record/read.rs
···
11
use ipld_core::ipld::Ipld;
12
use jacquard_repo::storage::BlockStore;
13
use serde::{Deserialize, Serialize};
14
-
use serde_json::{json, Map, Value};
15
use std::collections::HashMap;
16
use std::str::FromStr;
17
use tracing::{error, info};
···
37
}
38
Ipld::List(arr) => Value::Array(arr.into_iter().map(ipld_to_json).collect()),
39
Ipld::Map(map) => {
40
-
let obj: Map<String, Value> = map
41
-
.into_iter()
42
-
.map(|(k, v)| (k, ipld_to_json(v)))
43
-
.collect();
44
Value::Object(obj)
45
}
46
Ipld::Link(cid) => json!({ "$link": cid.to_string() }),
···
11
use ipld_core::ipld::Ipld;
12
use jacquard_repo::storage::BlockStore;
13
use serde::{Deserialize, Serialize};
14
+
use serde_json::{Map, Value, json};
15
use std::collections::HashMap;
16
use std::str::FromStr;
17
use tracing::{error, info};
···
37
}
38
Ipld::List(arr) => Value::Array(arr.into_iter().map(ipld_to_json).collect()),
39
Ipld::Map(map) => {
40
+
let obj: Map<String, Value> =
41
+
map.into_iter().map(|(k, v)| (k, ipld_to_json(v))).collect();
42
Value::Object(obj)
43
}
44
Ipld::Link(cid) => json!({ "$link": cid.to_string() }),
+6 -7 src/api/repo/record/utils.rs
···
5
use jacquard_repo::commit::Commit;
6
use jacquard_repo::storage::BlockStore;
7
use k256::ecdsa::SigningKey;
8
-
use serde_json::{json, Value};
9
use std::str::FromStr;
10
use uuid::Uuid;
11
···
18
fn extract_blob_cids_recursive(value: &Value, blobs: &mut Vec<String>) {
19
match value {
20
Value::Object(map) => {
21
-
if map.get("$type").and_then(|v| v.as_str()) == Some("blob") {
22
-
if let Some(ref_obj) = map.get("ref") {
23
-
if let Some(link) = ref_obj.get("$link").and_then(|v| v.as_str()) {
24
-
blobs.push(link.to_string());
25
-
}
26
-
}
27
}
28
for v in map.values() {
29
extract_blob_cids_recursive(v, blobs);
···
5
use jacquard_repo::commit::Commit;
6
use jacquard_repo::storage::BlockStore;
7
use k256::ecdsa::SigningKey;
8
+
use serde_json::{Value, json};
9
use std::str::FromStr;
10
use uuid::Uuid;
11
···
18
fn extract_blob_cids_recursive(value: &Value, blobs: &mut Vec<String>) {
19
match value {
20
Value::Object(map) => {
21
+
if map.get("$type").and_then(|v| v.as_str()) == Some("blob")
22
+
&& let Some(ref_obj) = map.get("ref")
23
+
&& let Some(link) = ref_obj.get("$link").and_then(|v| v.as_str())
24
+
{
25
+
blobs.push(link.to_string());
26
}
27
for v in map.values() {
28
extract_blob_cids_recursive(v, blobs);
+94 -21 src/api/server/account_status.rs
···
8
response::{IntoResponse, Response},
9
};
10
use bcrypt::verify;
11
-
use cid::Cid;
12
use chrono::{Duration, Utc};
13
use jacquard_repo::commit::Commit;
14
use jacquard_repo::storage::BlockStore;
15
use k256::ecdsa::SigningKey;
···
216
})?;
217
218
if let Some(row) = user_row {
219
-
let key_bytes =
220
-
crate::config::decrypt_key(&row.key_bytes, row.encryption_version).map_err(|e| {
221
error!("Failed to decrypt user key: {}", e);
222
(
223
StatusCode::INTERNAL_SERVER_ERROR,
···
247
));
248
}
249
}
250
-
} else if did.starts_with("did:web:") {
251
let client = reqwest::Client::new();
252
-
let host_and_path = &did[8..];
253
let decoded = host_and_path.replace("%3A", ":");
254
let parts: Vec<&str> = decoded.split(':').collect();
255
-
let (host, path_parts) = if parts.len() > 1 && parts[1].chars().all(|c| c.is_ascii_digit()) {
256
(format!("{}:{}", parts[0], parts[1]), parts[2..].to_vec())
257
} else {
258
(parts[0].to_string(), parts[1..].to_vec())
259
};
260
-
let scheme = if host.starts_with("localhost") || host.starts_with("127.") || host.contains(':') {
261
-
"http"
262
-
} else {
263
-
"https"
264
-
};
265
let url = if path_parts.is_empty() {
266
format!("{}://{}/.well-known/did.json", scheme, host)
267
} else {
···
323
State(state): State<AppState>,
324
headers: axum::http::HeaderMap,
325
) -> Response {
326
let extracted = match crate::auth::extract_auth_token_from_header(
327
headers.get("Authorization").and_then(|h| h.to_str().ok()),
328
) {
329
Some(t) => t,
330
-
None => return ApiError::AuthenticationRequired.into_response(),
331
};
332
let dpop_proof = headers.get("DPoP").and_then(|h| h.to_str().ok());
333
let http_uri = format!(
···
346
.await
347
{
348
Ok(user) => user,
349
-
Err(e) => return ApiError::from(e).into_response(),
350
};
351
352
if let Err(e) = crate::auth::scope_check::check_account_scope(
353
auth_user.is_oauth,
···
355
crate::oauth::scopes::AccountAttr::Repo,
356
crate::oauth::scopes::AccountAction::Manage,
357
) {
358
return e;
359
}
360
361
let did = auth_user.did;
362
363
if let Err((status, json)) = assert_valid_did_document_for_service(&state.db, &did).await {
364
info!(
365
-
"activateAccount rejected for {}: DID document validation failed",
366
-
did
367
);
368
return (status, json).into_response();
369
}
370
371
let handle = sqlx::query_scalar!("SELECT handle FROM users WHERE did = $1", did)
372
.fetch_optional(&state.db)
373
.await
374
.ok()
375
.flatten();
376
let result = sqlx::query!("UPDATE users SET deactivated_at = NULL WHERE did = $1", did)
377
.execute(&state.db)
378
.await;
379
match result {
380
Ok(_) => {
381
if let Some(ref h) = handle {
382
let _ = state.cache.delete(&format!("handle:{}", h)).await;
383
}
384
if let Err(e) =
385
crate::api::repo::record::sequence_account_event(&state, &did, true, None).await
386
{
387
-
warn!("Failed to sequence account activation event: {}", e);
388
}
389
if let Err(e) =
390
crate::api::repo::record::sequence_identity_event(&state, &did, handle.as_deref())
391
.await
392
{
393
-
warn!("Failed to sequence identity event for activation: {}", e);
394
}
395
let repo_root = sqlx::query_scalar!(
396
"SELECT r.repo_root_cid FROM repos r JOIN users u ON r.user_id = u.id WHERE u.did = $1",
···
401
.ok()
402
.flatten();
403
if let Some(root_cid) = repo_root {
404
let rev = if let Ok(cid) = Cid::from_str(&root_cid) {
405
if let Ok(Some(block)) = state.block_store.get(&cid).await {
406
Commit::from_cbor(&block).ok().map(|c| c.rev().to_string())
···
410
} else {
411
None
412
};
413
-
if let Err(e) =
414
-
crate::api::repo::record::sequence_sync_event(&state, &did, &root_cid, rev.as_deref()).await
415
{
416
-
warn!("Failed to sequence sync event for activation: {}", e);
417
}
418
}
419
(StatusCode::OK, Json(json!({}))).into_response()
420
}
421
Err(e) => {
422
-
error!("DB error activating account: {:?}", e);
423
(
424
StatusCode::INTERNAL_SERVER_ERROR,
425
Json(json!({"error": "InternalError"})),
···
8
response::{IntoResponse, Response},
9
};
10
use bcrypt::verify;
11
use chrono::{Duration, Utc};
12
+
use cid::Cid;
13
use jacquard_repo::commit::Commit;
14
use jacquard_repo::storage::BlockStore;
15
use k256::ecdsa::SigningKey;
···
216
})?;
217
218
if let Some(row) = user_row {
219
+
let key_bytes = crate::config::decrypt_key(&row.key_bytes, row.encryption_version)
220
+
.map_err(|e| {
221
error!("Failed to decrypt user key: {}", e);
222
(
223
StatusCode::INTERNAL_SERVER_ERROR,
···
247
));
248
}
249
}
250
+
} else if let Some(host_and_path) = did.strip_prefix("did:web:") {
251
let client = reqwest::Client::new();
252
let decoded = host_and_path.replace("%3A", ":");
253
let parts: Vec<&str> = decoded.split(':').collect();
254
+
let (host, path_parts) = if parts.len() > 1 && parts[1].chars().all(|c| c.is_ascii_digit())
255
+
{
256
(format!("{}:{}", parts[0], parts[1]), parts[2..].to_vec())
257
} else {
258
(parts[0].to_string(), parts[1..].to_vec())
259
};
260
+
let scheme =
261
+
if host.starts_with("localhost") || host.starts_with("127.") || host.contains(':') {
262
+
"http"
263
+
} else {
264
+
"https"
265
+
};
266
let url = if path_parts.is_empty() {
267
format!("{}://{}/.well-known/did.json", scheme, host)
268
} else {
···
324
State(state): State<AppState>,
325
headers: axum::http::HeaderMap,
326
) -> Response {
327
+
info!("[MIGRATION] activateAccount called");
328
let extracted = match crate::auth::extract_auth_token_from_header(
329
headers.get("Authorization").and_then(|h| h.to_str().ok()),
330
) {
331
Some(t) => t,
332
+
None => {
333
+
info!("[MIGRATION] activateAccount: No auth token");
334
+
return ApiError::AuthenticationRequired.into_response();
335
+
}
336
};
337
let dpop_proof = headers.get("DPoP").and_then(|h| h.to_str().ok());
338
let http_uri = format!(
···
351
.await
352
{
353
Ok(user) => user,
354
+
Err(e) => {
355
+
info!("[MIGRATION] activateAccount: Auth failed: {:?}", e);
356
+
return ApiError::from(e).into_response();
357
+
}
358
};
359
+
info!(
360
+
"[MIGRATION] activateAccount: Authenticated user did={}",
361
+
auth_user.did
362
+
);
363
364
if let Err(e) = crate::auth::scope_check::check_account_scope(
365
auth_user.is_oauth,
···
367
crate::oauth::scopes::AccountAttr::Repo,
368
crate::oauth::scopes::AccountAction::Manage,
369
) {
370
+
info!("[MIGRATION] activateAccount: Scope check failed");
371
return e;
372
}
373
374
let did = auth_user.did;
375
376
+
info!(
377
+
"[MIGRATION] activateAccount: Validating DID document for did={}",
378
+
did
379
+
);
380
+
let did_validation_start = std::time::Instant::now();
381
if let Err((status, json)) = assert_valid_did_document_for_service(&state.db, &did).await {
382
info!(
383
+
"[MIGRATION] activateAccount: DID document validation FAILED for {} (took {:?})",
384
+
did,
385
+
did_validation_start.elapsed()
386
);
387
return (status, json).into_response();
388
}
389
+
info!(
390
+
"[MIGRATION] activateAccount: DID document validation SUCCESS for {} (took {:?})",
391
+
did,
392
+
did_validation_start.elapsed()
393
+
);
394
395
let handle = sqlx::query_scalar!("SELECT handle FROM users WHERE did = $1", did)
396
.fetch_optional(&state.db)
397
.await
398
.ok()
399
.flatten();
400
+
info!(
401
+
"[MIGRATION] activateAccount: Activating account did={} handle={:?}",
402
+
did, handle
403
+
);
404
let result = sqlx::query!("UPDATE users SET deactivated_at = NULL WHERE did = $1", did)
405
.execute(&state.db)
406
.await;
407
match result {
408
Ok(_) => {
409
+
info!(
410
+
"[MIGRATION] activateAccount: DB update success for did={}",
411
+
did
412
+
);
413
if let Some(ref h) = handle {
414
let _ = state.cache.delete(&format!("handle:{}", h)).await;
415
}
416
+
info!(
417
+
"[MIGRATION] activateAccount: Sequencing account event (active=true) for did={}",
418
+
did
419
+
);
420
if let Err(e) =
421
crate::api::repo::record::sequence_account_event(&state, &did, true, None).await
422
{
423
+
warn!(
424
+
"[MIGRATION] activateAccount: Failed to sequence account activation event: {}",
425
+
e
426
+
);
427
+
} else {
428
+
info!("[MIGRATION] activateAccount: Account event sequenced successfully");
429
}
430
+
info!(
431
+
"[MIGRATION] activateAccount: Sequencing identity event for did={} handle={:?}",
432
+
did, handle
433
+
);
434
if let Err(e) =
435
crate::api::repo::record::sequence_identity_event(&state, &did, handle.as_deref())
436
.await
437
{
438
+
warn!(
439
+
"[MIGRATION] activateAccount: Failed to sequence identity event for activation: {}",
440
+
e
441
+
);
442
+
} else {
443
+
info!("[MIGRATION] activateAccount: Identity event sequenced successfully");
444
}
445
let repo_root = sqlx::query_scalar!(
446
"SELECT r.repo_root_cid FROM repos r JOIN users u ON r.user_id = u.id WHERE u.did = $1",
···
451
.ok()
452
.flatten();
453
if let Some(root_cid) = repo_root {
454
+
info!(
455
+
"[MIGRATION] activateAccount: Sequencing sync event for did={} root_cid={}",
456
+
did, root_cid
457
+
);
458
let rev = if let Ok(cid) = Cid::from_str(&root_cid) {
459
if let Ok(Some(block)) = state.block_store.get(&cid).await {
460
Commit::from_cbor(&block).ok().map(|c| c.rev().to_string())
···
464
} else {
465
None
466
};
467
+
if let Err(e) = crate::api::repo::record::sequence_sync_event(
468
+
&state,
469
+
&did,
470
+
&root_cid,
471
+
rev.as_deref(),
472
+
)
473
+
.await
474
{
475
+
warn!(
476
+
"[MIGRATION] activateAccount: Failed to sequence sync event for activation: {}",
477
+
e
478
+
);
479
+
} else {
480
+
info!("[MIGRATION] activateAccount: Sync event sequenced successfully");
481
}
482
+
} else {
483
+
warn!(
484
+
"[MIGRATION] activateAccount: No repo root found for did={}",
485
+
did
486
+
);
487
}
488
+
info!("[MIGRATION] activateAccount: SUCCESS for did={}", did);
489
(StatusCode::OK, Json(json!({}))).into_response()
490
}
491
Err(e) => {
492
+
error!(
493
+
"[MIGRATION] activateAccount: DB error activating account: {:?}",
494
+
e
495
+
);
496
(
497
StatusCode::INTERNAL_SERVER_ERROR,
498
Json(json!({"error": "InternalError"})),
+3
-3
src/api/server/mod.rs
···
26
pub use invite::{create_invite_code, create_invite_codes, get_account_invite_codes};
27
pub use logo::get_logo;
28
pub use meta::{describe_server, health, robots_txt};
29
pub use passkey_account::{
30
complete_passkey_setup, create_passkey_account, recover_passkey_account,
31
request_passkey_recovery, start_passkey_registration_for_setup,
···
56
pub use trusted_devices::{
57
extend_device_trust, is_device_trusted, list_trusted_devices, revoke_trusted_device,
58
trust_device, update_trusted_device,
59
-
};
60
-
pub use migration::{
61
-
clear_migration_forwarding, get_migration_status, update_migration_forwarding,
62
};
63
pub use verify_email::{resend_migration_verification, verify_migration_email};
64
pub use verify_token::{VerifyTokenInput, VerifyTokenOutput, verify_token, verify_token_internal};
···
26
pub use invite::{create_invite_code, create_invite_codes, get_account_invite_codes};
27
pub use logo::get_logo;
28
pub use meta::{describe_server, health, robots_txt};
29
+
pub use migration::{
30
+
clear_migration_forwarding, get_migration_status, update_migration_forwarding,
31
+
};
32
pub use passkey_account::{
33
complete_passkey_setup, create_passkey_account, recover_passkey_account,
34
request_passkey_recovery, start_passkey_registration_for_setup,
···
59
pub use trusted_devices::{
60
extend_device_trust, is_device_trusted, list_trusted_devices, revoke_trusted_device,
61
trust_device, update_trusted_device,
62
};
63
pub use verify_email::{resend_migration_verification, verify_migration_email};
64
pub use verify_token::{VerifyTokenInput, VerifyTokenOutput, verify_token, verify_token_internal};
+1
-5
src/api/server/verify_token.rs
+13
-16
src/auth/mod.rs
···
229
.ok()
230
.flatten();
231
232
-
match session_row {
233
-
Some(row) => {
234
-
if row.access_expires_at > chrono::Utc::now() {
235
-
session_valid = true;
236
-
if let Some(c) = cache {
237
-
let _ = c
238
-
.set(
239
-
&session_cache_key,
240
-
"1",
241
-
Duration::from_secs(SESSION_CACHE_TTL_SECS),
242
-
)
243
-
.await;
244
-
}
245
-
} else {
246
-
return Err(TokenValidationError::TokenExpired);
247
}
248
}
249
-
None => {}
250
}
251
}
252
···
229
.ok()
230
.flatten();
231
232
+
if let Some(row) = session_row {
233
+
if row.access_expires_at > chrono::Utc::now() {
234
+
session_valid = true;
235
+
if let Some(c) = cache {
236
+
let _ = c
237
+
.set(
238
+
&session_cache_key,
239
+
"1",
240
+
Duration::from_secs(SESSION_CACHE_TTL_SECS),
241
+
)
242
+
.await;
243
}
244
+
} else {
245
+
return Err(TokenValidationError::TokenExpired);
246
}
247
}
248
}
249
+2
-3
src/comms/sender.rs
···
1
use async_trait::async_trait;
2
-
use base64::{engine::general_purpose::STANDARD as BASE64, Engine};
3
use reqwest::Client;
4
use serde_json::json;
5
use std::process::Stdio;
···
103
}
104
105
pub fn format_email(&self, notification: &QueuedComms) -> String {
106
-
let subject =
107
-
mime_encode_header(notification.subject.as_deref().unwrap_or("Notification"));
108
let recipient = sanitize_header_value(&notification.recipient);
109
let from_header = if self.from_name.is_empty() {
110
self.from_address.clone()
···
1
use async_trait::async_trait;
2
+
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
3
use reqwest::Client;
4
use serde_json::json;
5
use std::process::Stdio;
···
103
}
104
105
pub fn format_email(&self, notification: &QueuedComms) -> String {
106
+
let subject = mime_encode_header(notification.subject.as_deref().unwrap_or("Notification"));
107
let recipient = sanitize_header_value(&notification.recipient);
108
let from_header = if self.from_name.is_empty() {
109
self.from_address.clone()
+9
-11
src/delegation/scopes.rs
···
75
}
76
77
fn find_matching_scope<'a>(requested: &str, granted: &HashSet<&'a str>) -> Option<&'a str> {
78
-
for granted_scope in granted {
79
-
if scopes_compatible(granted_scope, requested) {
80
-
return Some(granted_scope);
81
-
}
82
-
}
83
-
None
84
}
85
86
fn scopes_compatible(granted: &str, requested: &str) -> bool {
···
97
return true;
98
}
99
100
-
if granted_base.ends_with(".*") {
101
-
let prefix = &granted_base[..granted_base.len() - 2];
102
-
if requested_base.starts_with(prefix) && requested_base.len() > prefix.len() {
103
-
return true;
104
-
}
105
}
106
107
false
···
75
}
76
77
fn find_matching_scope<'a>(requested: &str, granted: &HashSet<&'a str>) -> Option<&'a str> {
78
+
granted
79
+
.iter()
80
+
.find(|&granted_scope| scopes_compatible(granted_scope, requested))
81
+
.copied()
82
}
83
84
fn scopes_compatible(granted: &str, requested: &str) -> bool {
···
95
return true;
96
}
97
98
+
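// A granted scope ending in ".*" matches any requested scope whose base shares that prefix and extends past it.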
if let Some(prefix) = granted_base.strip_suffix(".*")
99
+
&& requested_base.starts_with(prefix)
100
+
&& requested_base.len() > prefix.len()
101
+
{
102
+
return true;
103
}
104
105
false
+75
-66
src/moderation/mod.rs
···
1
/*
2
-
* CONTENT WARNING
3
-
*
4
-
* This file contains explicit slurs and hateful language. We're sorry you have to see them.
5
-
*
6
-
* These words exist here for one reason: to ensure our moderation system correctly blocks them.
7
-
* We can't verify the filter catches the n-word without testing against the actual word.
8
-
* Euphemisms wouldn't prove the protection works.
9
-
*
10
-
* If reading this file has caused you distress, please know:
11
-
* - you are valued and welcome in this community
12
-
* - these words do not reflect the views of this project or its contributors
13
-
* - we maintain this code precisely because we believe everyone deserves an experience on the web that is free from this kinda language
14
*/
15
16
use regex::Regex;
···
70
71
fn has_explicit_slur_with_extra_words(text: &str, extra_words: &[String]) -> bool {
72
let text_lower = text.to_lowercase();
73
-
let normalized = text_lower.replace('.', "").replace('-', "").replace('_', "");
74
let stripped = strip_trailing_digits(&text_lower);
75
let normalized_stripped = strip_trailing_digits(&normalized);
76
···
104
#[cfg(test)]
105
mod tests {
106
use super::*;
107
108
#[test]
109
fn test_chink_pattern() {
110
-
assert!(has_explicit_slur("chink"));
111
-
assert!(has_explicit_slur("chinks"));
112
-
assert!(has_explicit_slur("CHINK"));
113
-
assert!(has_explicit_slur("Chinks"));
114
}
115
116
#[test]
117
fn test_coon_pattern() {
118
-
assert!(has_explicit_slur("coon"));
119
-
assert!(has_explicit_slur("coons"));
120
-
assert!(has_explicit_slur("COON"));
121
}
122
123
#[test]
124
fn test_fag_pattern() {
125
-
assert!(has_explicit_slur("fag"));
126
-
assert!(has_explicit_slur("fags"));
127
-
assert!(has_explicit_slur("faggot"));
128
-
assert!(has_explicit_slur("faggots"));
129
-
assert!(has_explicit_slur("faggotry"));
130
}
131
132
#[test]
133
fn test_kike_pattern() {
134
-
assert!(has_explicit_slur("kike"));
135
-
assert!(has_explicit_slur("kikes"));
136
-
assert!(has_explicit_slur("KIKE"));
137
-
assert!(has_explicit_slur("kikery"));
138
}
139
140
#[test]
141
fn test_nigger_pattern() {
142
-
assert!(has_explicit_slur("nigger"));
143
-
assert!(has_explicit_slur("niggers"));
144
-
assert!(has_explicit_slur("NIGGER"));
145
-
assert!(has_explicit_slur("nigga"));
146
-
assert!(has_explicit_slur("niggas"));
147
}
148
149
#[test]
150
fn test_tranny_pattern() {
151
-
assert!(has_explicit_slur("tranny"));
152
-
assert!(has_explicit_slur("trannies"));
153
-
assert!(has_explicit_slur("TRANNY"));
154
}
155
156
#[test]
157
fn test_normalization_bypass() {
158
-
assert!(has_explicit_slur("n.i.g.g.e.r"));
159
-
assert!(has_explicit_slur("n-i-g-g-e-r"));
160
-
assert!(has_explicit_slur("n_i_g_g_e_r"));
161
-
assert!(has_explicit_slur("f.a.g"));
162
-
assert!(has_explicit_slur("f-a-g"));
163
-
assert!(has_explicit_slur("c.h.i.n.k"));
164
-
assert!(has_explicit_slur("k_i_k_e"));
165
}
166
167
#[test]
168
fn test_trailing_digits_bypass() {
169
-
assert!(has_explicit_slur("faggot123"));
170
-
assert!(has_explicit_slur("nigger69"));
171
-
assert!(has_explicit_slur("chink420"));
172
-
assert!(has_explicit_slur("fag1"));
173
-
assert!(has_explicit_slur("kike2024"));
174
-
assert!(has_explicit_slur("n_i_g_g_e_r123"));
175
}
176
177
#[test]
178
fn test_embedded_in_sentence() {
179
-
assert!(has_explicit_slur("you are a faggot"));
180
-
assert!(has_explicit_slur("stupid nigger"));
181
-
assert!(has_explicit_slur("go away chink"));
182
}
183
184
#[test]
···
210
211
#[test]
212
fn test_case_insensitive() {
213
-
assert!(has_explicit_slur("NIGGER"));
214
-
assert!(has_explicit_slur("Nigger"));
215
-
assert!(has_explicit_slur("NiGgEr"));
216
-
assert!(has_explicit_slur("FAGGOT"));
217
-
assert!(has_explicit_slur("Faggot"));
218
}
219
220
#[test]
221
fn test_leetspeak_bypass() {
222
-
assert!(has_explicit_slur("f4ggot"));
223
-
assert!(has_explicit_slur("f4gg0t"));
224
-
assert!(has_explicit_slur("n1gger"));
225
-
assert!(has_explicit_slur("n1gg3r"));
226
-
assert!(has_explicit_slur("k1ke"));
227
-
assert!(has_explicit_slur("ch1nk"));
228
-
assert!(has_explicit_slur("tr4nny"));
229
}
230
231
#[test]
···
253
assert!(has_explicit_slur_with_extra_words("b4dw0rd", &extra));
254
assert!(has_explicit_slur_with_extra_words("b4dw0rd789", &extra));
255
assert!(has_explicit_slur_with_extra_words("b.4.d.w.0.r.d", &extra));
256
-
assert!(has_explicit_slur_with_extra_words("this contains badword here", &extra));
257
assert!(has_explicit_slur_with_extra_words("0ff3n$1v3", &extra));
258
259
assert!(!has_explicit_slur_with_extra_words("goodword", &extra));
···
1
/*
2
+
* CONTENT WARNING
3
+
*
4
+
* This file contains explicit slurs and hateful language. We're sorry you have to see them.
5
+
*
6
+
* These words exist here for one reason: to ensure our moderation system correctly blocks them.
7
+
* We can't verify the filter catches the n-word without testing against the actual word.
8
+
* Euphemisms wouldn't prove the protection works.
9
+
*
10
+
* If reading this file has caused you distress, please know:
11
+
* - you are valued and welcome in this community
12
+
* - these words do not reflect the views of this project or its contributors
13
+
* - we maintain this code precisely because we believe everyone deserves an experience on the web that is free from this kinda language
14
*/
15
16
use regex::Regex;
···
70
71
fn has_explicit_slur_with_extra_words(text: &str, extra_words: &[String]) -> bool {
72
let text_lower = text.to_lowercase();
73
+
let normalized = text_lower.replace(['.', '-', '_'], "");
74
let stripped = strip_trailing_digits(&text_lower);
75
let normalized_stripped = strip_trailing_digits(&normalized);
76
···
104
#[cfg(test)]
105
mod tests {
106
use super::*;
107
+
use base64::Engine;
108
+
109
+
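// Test fixtures are stored base64-encoded so the slurs never sit in the source as plaintext; decode them at test time.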
fn d(b64: &str) -> String {
110
+
let bytes = base64::engine::general_purpose::STANDARD.decode(b64).unwrap();
111
+
String::from_utf8(bytes).unwrap()
112
+
}
113
114
#[test]
115
fn test_chink_pattern() {
116
+
assert!(has_explicit_slur(&d("Y2hpbms=")));
117
+
assert!(has_explicit_slur(&d("Y2hpbmtz")));
118
+
assert!(has_explicit_slur(&d("Q0hJTks=")));
119
+
assert!(has_explicit_slur(&d("Q2hpbmtz")));
120
}
121
122
#[test]
123
fn test_coon_pattern() {
124
+
assert!(has_explicit_slur(&d("Y29vbg==")));
125
+
assert!(has_explicit_slur(&d("Y29vbnM=")));
126
+
assert!(has_explicit_slur(&d("Q09PTg==")));
127
}
128
129
#[test]
130
fn test_fag_pattern() {
131
+
assert!(has_explicit_slur(&d("ZmFn")));
132
+
assert!(has_explicit_slur(&d("ZmFncw==")));
133
+
assert!(has_explicit_slur(&d("ZmFnZ290")));
134
+
assert!(has_explicit_slur(&d("ZmFnZ290cw==")));
135
+
assert!(has_explicit_slur(&d("ZmFnZ290cnk=")));
136
}
137
138
#[test]
139
fn test_kike_pattern() {
140
+
assert!(has_explicit_slur(&d("a2lrZQ==")));
141
+
assert!(has_explicit_slur(&d("a2lrZXM=")));
142
+
assert!(has_explicit_slur(&d("S0lLRQ==")));
143
+
assert!(has_explicit_slur(&d("a2lrZXJ5")));
144
}
145
146
#[test]
147
fn test_nigger_pattern() {
148
+
assert!(has_explicit_slur(&d("bmlnZ2Vy")));
149
+
assert!(has_explicit_slur(&d("bmlnZ2Vycw==")));
150
+
assert!(has_explicit_slur(&d("TklHR0VS")));
151
+
assert!(has_explicit_slur(&d("bmlnZ2E=")));
152
+
assert!(has_explicit_slur(&d("bmlnZ2Fz")));
153
}
154
155
#[test]
156
fn test_tranny_pattern() {
157
+
assert!(has_explicit_slur(&d("dHJhbm55")));
158
+
assert!(has_explicit_slur(&d("dHJhbm5pZXM=")));
159
+
assert!(has_explicit_slur(&d("VFJBTk5Z")));
160
}
161
162
#[test]
163
fn test_normalization_bypass() {
164
+
assert!(has_explicit_slur(&d("bi5pLmcuZy5lLnI=")));
165
+
assert!(has_explicit_slur(&d("bi1pLWctZy1lLXI=")));
166
+
assert!(has_explicit_slur(&d("bl9pX2dfZ19lX3I=")));
167
+
assert!(has_explicit_slur(&d("Zi5hLmc=")));
168
+
assert!(has_explicit_slur(&d("Zi1hLWc=")));
169
+
assert!(has_explicit_slur(&d("Yy5oLmkubi5r")));
170
+
assert!(has_explicit_slur(&d("a19pX2tfZQ==")));
171
}
172
173
#[test]
174
fn test_trailing_digits_bypass() {
175
+
assert!(has_explicit_slur(&d("ZmFnZ290MTIz")));
176
+
assert!(has_explicit_slur(&d("bmlnZ2VyNjk=")));
177
+
assert!(has_explicit_slur(&d("Y2hpbms0MjA=")));
178
+
assert!(has_explicit_slur(&d("ZmFnMQ==")));
179
+
assert!(has_explicit_slur(&d("a2lrZTIwMjQ=")));
180
+
assert!(has_explicit_slur(&d("bl9pX2dfZ19lX3IxMjM=")));
181
}
182
183
#[test]
184
fn test_embedded_in_sentence() {
185
+
assert!(has_explicit_slur(&d("eW91IGFyZSBhIGZhZ2dvdA==")));
186
+
assert!(has_explicit_slur(&d("c3R1cGlkIG5pZ2dlcg==")));
187
+
assert!(has_explicit_slur(&d("Z28gYXdheSBjaGluaw==")));
188
}
189
190
#[test]
···
216
217
#[test]
218
fn test_case_insensitive() {
219
+
assert!(has_explicit_slur(&d("TklHR0VS")));
220
+
assert!(has_explicit_slur(&d("TmlnZ2Vy")));
221
+
assert!(has_explicit_slur(&d("TmlHZ0Vy")));
222
+
assert!(has_explicit_slur(&d("RkFHR09U")));
223
+
assert!(has_explicit_slur(&d("RmFnZ290")));
224
}
225
226
#[test]
227
fn test_leetspeak_bypass() {
228
+
assert!(has_explicit_slur(&d("ZjRnZ290")));
229
+
assert!(has_explicit_slur(&d("ZjRnZzB0")));
230
+
assert!(has_explicit_slur(&d("bjFnZ2Vy")));
231
+
assert!(has_explicit_slur(&d("bjFnZzNy")));
232
+
assert!(has_explicit_slur(&d("azFrZQ==")));
233
+
assert!(has_explicit_slur(&d("Y2gxbms=")));
234
+
assert!(has_explicit_slur(&d("dHI0bm55")));
235
}
236
237
#[test]
···
259
assert!(has_explicit_slur_with_extra_words("b4dw0rd", &extra));
260
assert!(has_explicit_slur_with_extra_words("b4dw0rd789", &extra));
261
assert!(has_explicit_slur_with_extra_words("b.4.d.w.0.r.d", &extra));
262
+
assert!(has_explicit_slur_with_extra_words(
263
+
"this contains badword here",
264
+
&extra
265
+
));
266
assert!(has_explicit_slur_with_extra_words("0ff3n$1v3", &extra));
267
268
assert!(!has_explicit_slur_with_extra_words("goodword", &extra));
+12
-9
src/oauth/endpoints/delegation.rs
···
88
}
89
};
90
91
-
if let Err(_) = db::set_request_did(&state.db, &form.request_uri, &delegated_did).await {
92
tracing::warn!("Failed to set delegated DID on authorization request");
93
}
94
···
168
.into_response();
169
}
170
171
-
let password_valid = match &controller.password_hash {
172
-
Some(hash) => match bcrypt::verify(&form.password, hash) {
173
-
Ok(valid) => valid,
174
-
Err(_) => false,
175
-
},
176
-
None => false,
177
-
};
178
179
if !password_valid {
180
return Json(DelegationAuthResponse {
···
186
.into_response();
187
}
188
189
-
if let Err(_) = db::set_controller_did(&state.db, &form.request_uri, &form.controller_did).await
190
{
191
return Json(DelegationAuthResponse {
192
success: false,
···
88
}
89
};
90
91
+
if db::set_request_did(&state.db, &form.request_uri, &delegated_did)
92
+
.await
93
+
.is_err()
94
+
{
95
tracing::warn!("Failed to set delegated DID on authorization request");
96
}
97
···
171
.into_response();
172
}
173
174
+
let password_valid = controller
175
+
.password_hash
176
+
.as_ref()
177
+
.map(|hash| bcrypt::verify(&form.password, hash).unwrap_or_default())
178
+
.unwrap_or_default();
179
180
if !password_valid {
181
return Json(DelegationAuthResponse {
···
187
.into_response();
188
}
189
190
+
if db::set_controller_did(&state.db, &form.request_uri, &form.controller_did)
191
+
.await
192
+
.is_err()
193
{
194
return Json(DelegationAuthResponse {
195
success: false,
+27
-26
src/sync/import.rs
···
189
if let Some(Ipld::List(entries)) = obj.get("e") {
190
for entry in entries {
191
if let Ipld::Map(entry_obj) = entry {
192
-
let prefix_len = entry_obj.get("p").and_then(|p| {
193
-
if let Ipld::Integer(n) = p {
194
-
Some(*n as usize)
195
-
} else {
196
-
None
197
-
}
198
-
}).unwrap_or(0);
199
200
let key_suffix = entry_obj.get("k").and_then(|k| {
201
if let Ipld::Bytes(b) = k {
···
222
}
223
});
224
225
-
if let Some(record_cid) = record_cid {
226
-
if let Ok(full_key) = String::from_utf8(current_key.clone()) {
227
-
if let Some(record_block) = blocks.get(&record_cid)
228
-
&& let Ok(record_value) =
229
-
serde_ipld_dagcbor::from_slice::<Ipld>(record_block)
230
-
{
231
-
let blob_refs = find_blob_refs_ipld(&record_value, 0);
232
-
let parts: Vec<&str> = full_key.split('/').collect();
233
-
if parts.len() >= 2 {
234
-
let collection = parts[..parts.len() - 1].join("/");
235
-
let rkey = parts[parts.len() - 1].to_string();
236
-
records.push(ImportedRecord {
237
-
collection,
238
-
rkey,
239
-
cid: record_cid,
240
-
blob_refs,
241
-
});
242
-
}
243
-
}
244
}
245
}
246
}
···
189
if let Some(Ipld::List(entries)) = obj.get("e") {
190
for entry in entries {
191
if let Ipld::Map(entry_obj) = entry {
192
+
let prefix_len = entry_obj
193
+
.get("p")
194
+
.and_then(|p| {
195
+
if let Ipld::Integer(n) = p {
196
+
Some(*n as usize)
197
+
} else {
198
+
None
199
+
}
200
+
})
201
+
.unwrap_or(0);
202
203
let key_suffix = entry_obj.get("k").and_then(|k| {
204
if let Ipld::Bytes(b) = k {
···
225
}
226
});
227
228
+
if let Some(record_cid) = record_cid
229
+
&& let Ok(full_key) = String::from_utf8(current_key.clone())
230
+
&& let Some(record_block) = blocks.get(&record_cid)
231
+
&& let Ok(record_value) =
232
+
serde_ipld_dagcbor::from_slice::<Ipld>(record_block)
233
+
{
234
+
let blob_refs = find_blob_refs_ipld(&record_value, 0);
235
+
let parts: Vec<&str> = full_key.split('/').collect();
236
+
if parts.len() >= 2 {
237
+
let collection = parts[..parts.len() - 1].join("/");
238
+
let rkey = parts[parts.len() - 1].to_string();
239
+
records.push(ImportedRecord {
240
+
collection,
241
+
rkey,
242
+
cid: record_cid,
243
+
blob_refs,
244
+
});
245
}
246
}
247
}
+13
-14
src/validation/mod.rs
···
161
.get("$type")
162
.and_then(|v| v.as_str())
163
.is_some_and(|t| t == "app.bsky.richtext.facet#tag");
164
-
if is_tag {
165
-
if let Some(tag) = feature.get("tag").and_then(|v| v.as_str()) {
166
-
if crate::moderation::has_explicit_slur(tag) {
167
-
return Err(ValidationError::BannedContent {
168
-
path: format!("facets/{}/features/{}/tag", i, j),
169
-
});
170
-
}
171
-
}
172
}
173
}
174
}
···
332
if !obj.contains_key("createdAt") {
333
return Err(ValidationError::MissingField("createdAt".to_string()));
334
}
335
-
if let Some(rkey) = rkey {
336
-
if crate::moderation::has_explicit_slur(rkey) {
337
-
return Err(ValidationError::BannedContent {
338
-
path: "rkey".to_string(),
339
-
});
340
-
}
341
}
342
if let Some(display_name) = obj.get("displayName").and_then(|v| v.as_str()) {
343
if display_name.is_empty() || display_name.len() > 240 {
···
161
.get("$type")
162
.and_then(|v| v.as_str())
163
.is_some_and(|t| t == "app.bsky.richtext.facet#tag");
164
+
if is_tag
165
+
&& let Some(tag) = feature.get("tag").and_then(|v| v.as_str())
166
+
&& crate::moderation::has_explicit_slur(tag)
167
+
{
168
+
return Err(ValidationError::BannedContent {
169
+
path: format!("facets/{}/features/{}/tag", i, j),
170
+
});
171
}
172
}
173
}
···
331
if !obj.contains_key("createdAt") {
332
return Err(ValidationError::MissingField("createdAt".to_string()));
333
}
334
+
if let Some(rkey) = rkey
335
+
&& crate::moderation::has_explicit_slur(rkey)
336
+
{
337
+
return Err(ValidationError::BannedContent {
338
+
path: "rkey".to_string(),
339
+
});
340
}
341
if let Some(display_name) = obj.get("displayName").and_then(|v| v.as_str()) {
342
if display_name.is_empty() || display_name.len() > 240 {
+34
-22
tests/banned_words.rs
···
1
/*
2
-
* CONTENT WARNING
3
-
*
4
-
* This file contains explicit slurs and hateful language. We're sorry you have to see them.
5
-
*
6
-
* These words exist here for one reason: to ensure our moderation system correctly blocks them.
7
-
* We can't verify the filter catches the n-word without testing against the actual word.
8
-
* Euphemisms wouldn't prove the protection works.
9
-
*
10
-
* If reading this file has caused you distress, please know:
11
-
* - you are valued and welcome in this community
12
-
* - these words do not reflect the views of this project or its contributors
13
-
* - we maintain this code precisely because we believe everyone deserves an experience on the web that is free from this kinda language
14
-
*/
15
16
mod common;
17
mod helpers;
···
20
use reqwest::StatusCode;
21
use serde_json::json;
22
23
#[tokio::test]
24
async fn test_handle_with_slur_rejected() {
25
let client = client();
26
let timestamp = chrono::Utc::now().timestamp_millis();
27
-
let offensive_handle = format!("nigger{}", timestamp);
28
29
let create_payload = json!({
30
"handle": offensive_handle,
···
45
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
46
let body: serde_json::Value = res.json().await.unwrap();
47
assert_eq!(body["error"], "InvalidHandle");
48
-
assert!(body["message"]
49
-
.as_str()
50
-
.unwrap_or("")
51
-
.contains("Inappropriate language"));
52
}
53
54
#[tokio::test]
55
async fn test_handle_with_normalized_slur_rejected() {
56
let client = client();
57
let timestamp = chrono::Utc::now().timestamp_millis();
58
-
let offensive_handle = format!("n-i-g-g-e-r{}", timestamp);
59
60
let create_payload = json!({
61
"handle": offensive_handle,
···
84
let (_, jwt) = setup_new_user("handleupdate").await;
85
86
let update_payload = json!({
87
-
"handle": "faggots"
88
});
89
90
let res = client
···
114
"rkey": "self",
115
"record": {
116
"$type": "app.bsky.actor.profile",
117
-
"displayName": "I am a kike"
118
}
119
});
120
···
146
"record": {
147
"$type": "app.bsky.actor.profile",
148
"displayName": "Normal Name",
149
-
"description": "I hate all chinks"
150
}
151
});
152
···
1
/*
2
+
* CONTENT WARNING
3
+
*
4
+
* This file contains explicit slurs and hateful language. We're sorry you have to see them.
5
+
*
6
+
* These words exist here for one reason: to ensure our moderation system correctly blocks them.
7
+
* We can't verify the filter catches the n-word without testing against the actual word.
8
+
* Euphemisms wouldn't prove the protection works.
9
+
*
10
+
* If reading this file has caused you distress, please know:
11
+
* - you are valued and welcome in this community
12
+
* - these words do not reflect the views of this project or its contributors
13
+
* - we maintain this code precisely because we believe everyone deserves an experience on the web that is free from this kinda language
14
+
*/
15
16
mod common;
17
mod helpers;
···
20
use reqwest::StatusCode;
21
use serde_json::json;
22
23
+
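// Same approach as the unit tests: keep the offensive fixtures base64-encoded and decode them only when the test runs.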
fn decode(b64: &str) -> String {
24
+
use base64::Engine;
25
+
let bytes = base64::engine::general_purpose::STANDARD
26
+
.decode(b64)
27
+
.expect("invalid base64 in test");
28
+
String::from_utf8(bytes).expect("invalid utf8 in test")
29
+
}
30
+
31
#[tokio::test]
32
async fn test_handle_with_slur_rejected() {
33
let client = client();
34
let timestamp = chrono::Utc::now().timestamp_millis();
35
+
let slur = decode("bmlnZ2Vy");
36
+
let offensive_handle = format!("{}{}", slur, timestamp);
37
38
let create_payload = json!({
39
"handle": offensive_handle,
···
54
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
55
let body: serde_json::Value = res.json().await.unwrap();
56
assert_eq!(body["error"], "InvalidHandle");
57
+
assert!(
58
+
body["message"]
59
+
.as_str()
60
+
.unwrap_or("")
61
+
.contains("Inappropriate language")
62
+
);
63
}
64
65
#[tokio::test]
66
async fn test_handle_with_normalized_slur_rejected() {
67
let client = client();
68
let timestamp = chrono::Utc::now().timestamp_millis();
69
+
let slur = decode("bi1pLWctZy1lLXI=");
70
+
let offensive_handle = format!("{}{}", slur, timestamp);
71
72
let create_payload = json!({
73
"handle": offensive_handle,
···
96
let (_, jwt) = setup_new_user("handleupdate").await;
97
98
let update_payload = json!({
99
+
"handle": decode("ZmFnZ290cw==")
100
});
101
102
let res = client
···
126
"rkey": "self",
127
"record": {
128
"$type": "app.bsky.actor.profile",
129
+
"displayName": decode("SSBhbSBhIGtpa2U=")
130
}
131
});
132
···
158
"record": {
159
"$type": "app.bsky.actor.profile",
160
"displayName": "Normal Name",
161
+
"description": decode("SSBoYXRlIGFsbCBjaGlua3M=")
162
}
163
});
164
+10
-24
tests/firehose_validation.rs
···
364
let client = client();
365
let (token, did) = create_account_and_login(&client).await;
366
367
-
let url = format!(
368
-
"ws://127.0.0.1:{}/xrpc/com.atproto.sync.subscribeRepos",
369
-
app_port()
370
-
);
371
-
let (mut ws_stream, _) = connect_async(&url).await.expect("Failed to connect");
372
-
373
let profile_payload = json!({
374
"repo": did,
375
"collection": "app.bsky.actor.profile",
···
393
let first_profile: Value = res.json().await.unwrap();
394
let first_cid = first_profile["cid"].as_str().unwrap();
395
396
-
let timeout = tokio::time::timeout(std::time::Duration::from_secs(5), async {
397
-
loop {
398
-
let msg = ws_stream.next().await.unwrap().unwrap();
399
-
let raw_bytes = match msg {
400
-
tungstenite::Message::Binary(bin) => bin,
401
-
_ => continue,
402
-
};
403
-
if let Ok((_, f)) = parse_frame(&raw_bytes) {
404
-
if f.repo == did {
405
-
break;
406
-
}
407
-
}
408
-
}
409
-
})
410
-
.await;
411
-
assert!(timeout.is_ok(), "Timed out waiting for first commit");
412
413
let update_payload = json!({
414
"repo": did,
···
432
assert_eq!(res.status(), StatusCode::OK);
433
434
let mut frame_opt: Option<CommitFrame> = None;
435
-
let timeout = tokio::time::timeout(std::time::Duration::from_secs(5), async {
436
loop {
437
-
let msg = ws_stream.next().await.unwrap().unwrap();
438
let raw_bytes = match msg {
439
tungstenite::Message::Binary(bin) => bin,
440
_ => continue,
···
364
let client = client();
365
let (token, did) = create_account_and_login(&client).await;
366
367
let profile_payload = json!({
368
"repo": did,
369
"collection": "app.bsky.actor.profile",
···
387
let first_profile: Value = res.json().await.unwrap();
388
let first_cid = first_profile["cid"].as_str().unwrap();
389
390
+
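// Connect to the firehose after the initial profile record is created; the test then only needs to observe the subsequent update commit.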
let url = format!(
391
+
"ws://127.0.0.1:{}/xrpc/com.atproto.sync.subscribeRepos",
392
+
app_port()
393
+
);
394
+
let (mut ws_stream, _) = connect_async(&url).await.expect("Failed to connect");
395
396
let update_payload = json!({
397
"repo": did,
···
415
assert_eq!(res.status(), StatusCode::OK);
416
417
let mut frame_opt: Option<CommitFrame> = None;
418
+
let timeout = tokio::time::timeout(std::time::Duration::from_secs(15), async {
419
loop {
420
+
let msg = match ws_stream.next().await {
421
+
Some(Ok(m)) => m,
422
+
_ => continue,
423
+
};
424
let raw_bytes = match msg {
425
tungstenite::Message::Binary(bin) => bin,
426
_ => continue,
+9
-2
tests/oauth_security.rs
···
1116
1117
let delegated_handle = format!("deleg-{}", ts);
1118
let delegated_res = http_client
1119
-
.post(format!("{}/xrpc/com.tranquil.delegation.createDelegatedAccount", url))
1120
.bearer_auth(controller_jwt)
1121
.json(&json!({
1122
"handle": delegated_handle,
···
1174
panic!("Delegation auth failed: {}", error_body);
1175
}
1176
let auth_body: Value = auth_res.json().await.unwrap();
1177
-
assert!(auth_body["success"].as_bool().unwrap_or(false), "Delegation auth should succeed: {:?}", auth_body);
1178
1179
let consent_res = http_client
1180
.post(format!("{}/oauth/authorize/consent", url))
···
1116
1117
let delegated_handle = format!("deleg-{}", ts);
1118
let delegated_res = http_client
1119
+
.post(format!(
1120
+
"{}/xrpc/com.tranquil.delegation.createDelegatedAccount",
1121
+
url
1122
+
))
1123
.bearer_auth(controller_jwt)
1124
.json(&json!({
1125
"handle": delegated_handle,
···
1177
panic!("Delegation auth failed: {}", error_body);
1178
}
1179
let auth_body: Value = auth_res.json().await.unwrap();
1180
+
assert!(
1181
+
auth_body["success"].as_bool().unwrap_or(false),
1182
+
"Delegation auth should succeed: {:?}",
1183
+
auth_body
1184
+
);
1185
1186
let consent_res = http_client
1187
.post(format!("{}/oauth/authorize/consent", url))